WebRTC视频帧率比特率问题

创建于:2024年11月1日

使用 O1 Mini 回答 Chat01

问题

class ClientActivity4 : ComponentActivity() {
private lateinit var socket: Socket
private var localPeer: PeerConnection? = null
private var localView: SurfaceViewRenderer? = null
private var remoteView: SurfaceViewRenderer? = null
private var localEglBase: EglBase? = null
private var remoteEglBase: EglBase? = null
private val pendingIceCandidates = mutableListOf<IceCandidate>()
private var currentRoom: String? = null
private val localPeerLiveData = MutableLiveData<PeerConnection?>(null)
private lateinit var signalingServerUrl: String
private lateinit var stunUrl: String
private lateinit var turnUrl: String
private lateinit var turnUsername: String
private lateinit var turnPassword: String
private val TAG: String = "WebRTC-Client"

text
// State variables for frame rate and bitrate private val frameRateState = mutableDoubleStateOf(0.0) private val bitrateState = mutableLongStateOf(0L) // Variables to track previous stats for calculations private var prevFramesDecoded = 0.0 private var prevBytesReceived = 0.0 private var prevTimestamp = 0.0 // Job for stats collection coroutine private var statsJob: Job? = null override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) // 设置角色和房间 currentRoom = intent.getStringExtra("room") ?: "default-room" signalingServerUrl = intent.getStringExtra("signalingServerUrl") ?: "https://wstszx.us.kg" stunUrl = intent.getStringExtra("stunUrl") ?: "stun:stun.wstszx.us.kg:3478" turnUrl = intent.getStringExtra("turnUrl") ?: "turn:turn.wstszx.us.kg:5349?transport=udp" turnUsername = intent.getStringExtra("turnUsername") ?: "wstszx" turnPassword = intent.getStringExtra("turnPassword") ?: "930379" Log.d(TAG, "onCreate: 角色 = 客户端, 房间 = $currentRoom, 信令服务器 = $signalingServerUrl, STUN URL = $stunUrl, TURN URL = $turnUrl") initializeSocketIO() setContent { WebRTCComposeLayout() } requestPermissionsIfNeeded() } private fun initializeSocketIO() { val protocol = if (signalingServerUrl.startsWith("https")) "https" else "http" val socketUrl = signalingServerUrl val options = IO.Options().apply { transports = arrayOf("websocket") secure = protocol == "https" path = "/socket.io/" } try { socket = IO.socket(socketUrl, options) socket.on(Socket.EVENT_CONNECT) { Log.d(TAG, "Socket 已连接") socket.emit("join", currentRoom) Log.d(TAG, "已加入房间: $currentRoom") initiateCall() } socket.on(Socket.EVENT_CONNECT_ERROR) { args -> if (args.isNotEmpty()) { val error = args[0] Log.e(TAG, "Socket 连接错误: $error") } } socket.on(Socket.EVENT_DISCONNECT) { args -> if (args.isNotEmpty()) { val reason = args[0] Log.d(TAG, "Socket 已断开: $reason") } } socket.on("signal") { args -> Log.d(TAG, "收到信令: ${args[0]}") if (args.isNotEmpty() && args[0] is JSONObject) { val data = args[0] as JSONObject handleSignalingData(data) } } socket.connect() Log.d(TAG, "正在连接到 Socket: $socketUrl...") } catch (e: Exception) { Log.e(TAG, "连接 Socket 时出错: ${e.message}") } } private fun requestPermissionsIfNeeded() { if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.INTERNET) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.ACCESS_NETWORK_STATE) != PackageManager.PERMISSION_GRANTED ) { requestPermissions( arrayOf( Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO, Manifest.permission.INTERNET, Manifest.permission.ACCESS_NETWORK_STATE ), 1 ) } } @Composable fun WebRTCComposeLayout() { val context = LocalContext.current var peerConnectionFactory: PeerConnectionFactory? = null var localVideoTrack: VideoTrack? by remember { mutableStateOf(null) } Surface(color = Color.Black) { Box(modifier = Modifier.fillMaxSize()) { // Remote Video View AndroidView( factory = { remoteView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(false) } remoteView!! }, modifier = Modifier.fillMaxSize(), update = { if (remoteEglBase?.eglBaseContext == null) { remoteEglBase = EglBase.create() it.init(remoteEglBase!!.eglBaseContext, null) it.setMirror(false) } } ) // Local Video View AndroidView( factory = { localView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(true) } localView!! }, modifier = Modifier .size(100.dp, 150.dp) .align(Alignment.BottomEnd) .padding(16.dp), update = { if (localEglBase == null) { localEglBase = EglBase.create() it.init(localEglBase!!.eglBaseContext, null) it.setMirror(true) } } ) // Display Frame Rate and Bitrate Text( text = "Frame Rate: ${frameRateState.doubleValue.roundToInt()} fps\nBitrate: ${bitrateState.longValue / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // Initialize PeerConnectionFactory and create PeerConnection LaunchedEffect(Unit) { val options = PeerConnectionFactory.InitializationOptions.builder(context) .createInitializationOptions() PeerConnectionFactory.initialize(options) val encoderFactory = DefaultVideoEncoderFactory( localEglBase!!.eglBaseContext, true, true ) val decoderFactory = DefaultVideoDecoderFactory(remoteEglBase!!.eglBaseContext) peerConnectionFactory = PeerConnectionFactory.builder() .setVideoEncoderFactory(encoderFactory) .setVideoDecoderFactory(decoderFactory) .createPeerConnectionFactory() initLocalVideo(context, localView, peerConnectionFactory!!, localEglBase!!) { localVideoTrack = it } createPeerConnection( context, peerConnectionFactory, localVideoTrack, remoteView!! ) { localPeer = it localPeerLiveData.postValue(it) } } } } } private fun initLocalVideo( context: Context, localView: SurfaceViewRenderer?, peerConnectionFactory: PeerConnectionFactory, eglBase: EglBase, onLocalVideoTrack: (VideoTrack) -> Unit ) { val videoCapturer = createCameraCapturer(context) val surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBase.eglBaseContext) val videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast) videoCapturer.initialize(surfaceTextureHelper, context, videoSource.capturerObserver) videoCapturer.startCapture(1920, 1080, 60) val localVideoTrack = peerConnectionFactory.createVideoTrack("video_track", videoSource) localVideoTrack.addSink(localView) val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints()) val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource) // 添加音视频轨道到本地流 val mediaStream = peerConnectionFactory.createLocalMediaStream("local_stream") mediaStream.addTrack(localAudioTrack) mediaStream.addTrack(localVideoTrack) onLocalVideoTrack(localVideoTrack) } private fun createCameraCapturer(context: Context): CameraVideoCapturer { val camera2Enumerator = Camera2Enumerator(context) val deviceNames = camera2Enumerator.deviceNames for (deviceName in deviceNames) { if (camera2Enumerator.isFrontFacing(deviceName)) { return camera2Enumerator.createCapturer(deviceName, null) ?: throw IllegalStateException("无法创建前置摄像头捕获器") } } // 如果没有前置摄像头,则使用第一个摄像头 return camera2Enumerator.createCapturer(deviceNames[0], null) ?: throw IllegalStateException("无法创建摄像头捕获器") } private fun createPeerConnection( context: Context, peerConnectionFactory: PeerConnectionFactory, localVideoTrack: VideoTrack?, remoteView: SurfaceViewRenderer, onLocalPeerCreated: (PeerConnection) -> Unit ) { val iceServers = listOf( PeerConnection.IceServer.builder(stunUrl).createIceServer(), PeerConnection.IceServer.builder(turnUrl) .setUsername(turnUsername) .setPassword(turnPassword) .createIceServer() ) val rtcConfig = PeerConnection.RTCConfiguration(iceServers).apply { bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN } val localPeer = peerConnectionFactory.createPeerConnection(rtcConfig, object : PeerConnection.Observer { override fun onIceCandidate(iceCandidate: IceCandidate?) { iceCandidate?.let { Log.d(TAG, "ICE candidate: $it") val signalData = JSONObject().apply { put("type", "ice") put("candidate", JSONObject().apply { put("sdpMid", it.sdpMid) put("sdpMLineIndex", it.sdpMLineIndex) put("candidate", it.sdp) }) put("room", currentRoom) } socket.emit("signal", signalData) } } override fun onIceCandidatesRemoved(candidates: Array<out IceCandidate>?) { Log.d(TAG, "ICE candidates removed") } override fun onSignalingChange(newState: PeerConnection.SignalingState?) { Log.d(TAG, "Signaling state changed to: $newState") } override fun onIceConnectionChange(newState: PeerConnection.IceConnectionState?) { Log.d(TAG, "ICE connection state changed to: $newState") } override fun onIceConnectionReceivingChange(receiving: Boolean) { Log.d(TAG, "ICE connection receiving change: $receiving") } override fun onIceGatheringChange(newState: PeerConnection.IceGatheringState?) { Log.d(TAG, "ICE gathering state changed to: $newState") } override fun onAddStream(stream: MediaStream?) { Log.d(TAG, "Stream added") } override fun onRemoveStream(stream: MediaStream?) { Log.d(TAG, "Stream removed") } override fun onDataChannel(dataChannel: DataChannel?) { Log.d(TAG, "Data channel created") } override fun onRenegotiationNeeded() { Log.d(TAG, "Renegotiation needed") } override fun onAddTrack(receiver: RtpReceiver?, streams: Array<out MediaStream>?) { Log.d(TAG, "Track added") receiver?.track()?.let { track -> if (track is VideoTrack) { track.addSink(remoteView) } } } override fun onTrack(transceiver: RtpTransceiver?) { Log.d(TAG, "onTrack called") transceiver?.receiver?.track()?.let { track -> if (track is VideoTrack) { track.addSink(remoteView) } } } override fun onConnectionChange(newState: PeerConnection.PeerConnectionState?) { Log.d(TAG, "Connection state changed to: $newState") } }) localVideoTrack?.let { localPeer?.addTrack(it, listOf("local_stream")) } val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints()) val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource) localPeer?.addTrack(localAudioTrack, listOf("local_stream")) onLocalPeerCreated(localPeer!!) // Start collecting stats after peer connection is created startStatsCollection() } private fun initiateCall() { Log.d(TAG, "Initiating call...") val constraints = MediaConstraints().apply { mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")) mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true")) } localPeer?.createOffer(object : SdpObserver { override fun onCreateSuccess(sessionDescription: SessionDescription?) { sessionDescription?.let { sdp -> localPeer?.setLocalDescription(object : SdpObserver { override fun onSetSuccess() { val signalData = JSONObject().apply { put("type", "offer") put("sdp", JSONObject().put("sdp", sdp.description)) put("room", currentRoom) } socket.emit("signal", signalData) } override fun onSetFailure(error: String?) { Log.e(TAG, "Set local description error: $error") } override fun onCreateSuccess(p0: SessionDescription?) {} override fun onCreateFailure(p0: String?) {} }, sdp) } } override fun onSetSuccess() {} override fun onCreateFailure(error: String?) { Log.e(TAG, "Create offer error: $error") } override fun onSetFailure(error: String?) {} }, constraints) } private fun handleSignalingData(data: JSONObject) { Log.d(TAG, "Handling signaling data: $data") when (data.getString("type")) { "answer" -> { Log.d(TAG, "Received answer") val sdp = SessionDescription( SessionDescription.Type.ANSWER, data.getJSONObject("sdp").getString("sdp") ) localPeer?.setRemoteDescription(object : SdpObserver { override fun onSetSuccess() { pendingIceCandidates.forEach { candidate -> localPeer?.addIceCandidate(candidate) } pendingIceCandidates.clear() Log.d(TAG, "Set remote description (answer) success") } override fun onSetFailure(error: String?) { Log.e(TAG, "Set remote description error: $error") } override fun onCreateSuccess(p0: SessionDescription?) {} override fun onCreateFailure(p0: String?) {} }, sdp) } "ice" -> { Log.d(TAG, "Received ICE candidate") val candidateData = data.getJSONObject("candidate") val candidate = IceCandidate( candidateData.getString("sdpMid"), candidateData.getInt("sdpMLineIndex"), candidateData.getString("candidate") ) if (localPeer?.remoteDescription != null) { localPeer?.addIceCandidate(candidate) } else { pendingIceCandidates.add(candidate) } } else -> { Log.e(TAG, "Unknown signaling type: ${data.getString("type")}") } } } private fun startStatsCollection() { statsJob = lifecycleScope.launch(Dispatchers.Default) { while (isActive) { delay(1000) // Collect stats every 1 second localPeer?.getStats { report -> parseStatsReport(report) } } } } private fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // Convert to seconds if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val frameRate = framesDelta / timeElapsed val bitrate = (bytesDelta * 8) / timeElapsed // in bits per second // Update the state on the main thread lifecycleScope.launch(Dispatchers.Main) { frameRateState.doubleValue = frameRate bitrateState.longValue = bitrate.toLong() } } // Update previous values prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } } override fun onDestroy() { super.onDestroy() statsJob?.cancel() socket.disconnect() localPeer?.dispose() localView?.release() remoteView?.release() localEglBase?.release() remoteEglBase?.release() }

}

目前这段代码并没有成功显示远程画面的帧率和码率,是什么原因

思考

Diagnosing display issues

I’m thinking through why the frame rate and bitrate aren't showing. It could be the stats collection, the state definitions, or the UI update's approach.

Identifying potential issues

Analyzing the use of mutable states and coroutine launch in parseStatsReport, I’m noting possible parsing errors, mismatched fields, and lifecycle problems in WebRTCComposeLayout's stats display.

I’m digging into the collectStats code to spot issues with stats retrieval or processing, particularly checking for inbound video RTP type and verifying data presence.

Ensuring Compose state updates

I'm thinking through the issue of ensuring state updates on the main thread and checking if the Text composable properly observes the state. Consider using mutableStateOf instead of mutableDoubleStateOf for accurate updates.

Revisiting state handling

I'm updating the state approach to be directly useable by Compose. Swapping mutableDoubleStateOf with mutableStateOf and setting values directly in parseStatsReport seems more efficient.

Investigating state handling

I'm checking if frameRateState and bitrateState are set up as mutableStateOf to update the UI correctly. Parsing stats might also be an issue.

Ensuring proper stats

Checking stats.type and stats.kind alignment with WebRTC stats. Adding logging for verification. Also, examining getStats implementation and UI proper observation.

Addressing coroutine lifecycle

I’m making sure the stats collection coroutine starts correctly and localPeer is initialized properly before starting stats collection, to prevent it from being prematurely canceled.

回答

There are several potential reasons why the remote video’s frame rate and bitrate are not being successfully displayed in your ClientActivity4 class. Below, I outline the most common issues and provide detailed steps to help you identify and resolve the problem.

1. Incorrect State Management in Compose

Issue:
The frameRateState and bitrateState are defined using mutableDoubleStateOf and mutableLongStateOf. These are not standard Compose state holders, which might prevent the UI from correctly observing and updating these values.

Solution:
Use mutableStateOf instead of mutableDoubleStateOf and mutableLongStateOf. This ensures that Compose properly observes changes and recomposes the UI accordingly.

Implementation:

kotlin
import androidx.compose.runtime.getValue import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.setValue // Replace your current state definitions with the following: private var frameRate by mutableStateOf(0.0) private var bitrate by mutableStateOf(0L)

Update State in parseStatsReport:

kotlin
private fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // Convert to seconds if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // in bits per second // Update the state on the main thread lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // Update previous values prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } }

2. Incorrect Parsing of WebRTC Stats

Issue:
The parseStatsReport function might not be correctly identifying or accessing the required stats. This could be due to mismatched stat types or missing fields.

Solution:
Add comprehensive logging within the parseStatsReport to verify that the expected stats are being received and correctly parsed.

Implementation:

kotlin
private fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { Log.d(TAG, "Stats Type: ${stats.type}, ID: ${stats.id}, Timestamp: ${stats.timestampUs}") if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String Log.d(TAG, "Inbound RTP Kind: $kind") if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // Convert to seconds Log.d(TAG, "Frames Decoded: $framesDecoded, Bytes Received: $bytesReceived, Timestamp: $timestamp") if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // in bits per second Log.d(TAG, "Calculated Frame Rate: $currentFrameRate fps, Bitrate: $currentBitrate bps") // Update the state on the main thread lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // Update previous values prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } }

Action Steps:

  1. Run the Application: Observe the logs to ensure that inbound-rtp stats for video are being captured.
  2. Verify Data Presence: Ensure that framesDecoded and bytesReceived are present and have valid values.
  3. Adjust Parsing Logic if Necessary: Depending on the actual stats structure, you might need to adjust the parsing logic.

3. UI Not Observing State Changes Properly

Issue:
Even if the state is correctly updated, if the Compose UI isn't properly observing these changes, the UI won't reflect the updated values.

Solution:
Ensure that the Text composable is correctly observing the state variables. With the updated mutableStateOf, Compose should automatically recompose when these variables change.

Implementation:

kotlin
@Composable fun WebRTCComposeLayout() { // ... existing code ... // Display Frame Rate and Bitrate Text( text = "Frame Rate: ${frameRate.roundToInt()} fps\nBitrate: ${bitrate / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // ... existing code ... }

4. Ensuring Stats Collection is Properly Initiated

Issue:
The startStatsCollection coroutine might not be properly initiated or could be prematurely canceled, preventing stats from being collected.

Solution:
Ensure that startStatsCollection is called only after the PeerConnection is fully established and that the coroutine remains active as long as needed.

Implementation:

  • Verify that startStatsCollection is called within the createPeerConnection method after the PeerConnection is successfully created.
  • Ensure that the PeerConnection remains active and that the coroutine isn't canceled unexpectedly.

5. Handling Different WebRTC Versions and Stats Structures

Issue:
Different versions of WebRTC might have varying structures for stats reports. Ensure compatibility with the WebRTC version you are using.

Solution:
Consult the WebRTC documentation for your specific version to verify the correct stat types and fields. Adjust your parsing logic accordingly.

Summary of Steps to Resolve the Issue:

  1. Update State Definitions:
    • Replace mutableDoubleStateOf and mutableLongStateOf with mutableStateOf.
  2. Enhance Logging:
    • Add detailed logs in the parseStatsReport to verify that stats are being correctly received and parsed.
  3. Ensure Proper UI Observation:
    • Confirm that the Text composable is correctly observing and displaying the updated state values.
  4. Verify Stats Collection Lifecycle:
    • Ensure that the stats collection coroutine is properly initiated and remains active.
  5. Check WebRTC Compatibility:
    • Ensure that your stats parsing logic aligns with the structure expected by your WebRTC version.

By systematically addressing each of these areas, you should be able to identify why the frame rate and bitrate are not being displayed and implement the necessary fixes to resolve the issue.

问题

给我增加打印帧率码率的代码

思考

Integrating print functionality

The user is coding in Kotlin for Android, working with Jetpack Compose and WebRTC. They want to add a logging feature to the parseStatsReport function to monitor frame rate and bitrate, enhancing their debugging process.

Enhancing code with logging

I’m detailing the benefits of integrating logging statements into the parseStatsReport function to track frame rate and bitrate values. This ensures accurate data representation and bolsters debugging.

Contributing ideas

Adding logging statements in the parseStatsReport function to print frame rate and bitrate values helps monitor system performance. This method tracks updates effectively, streamlining debugging and performance assessment.

Enhancing logging

I’m thinking through how to enrich the parseStatsReport function with logging to monitor frame rate and bitrate effectively. This involves not just adding Log.d statements but also, potentially, inserting additional logs throughout to ensure comprehensive tracking.

Monitoring progress

I’m working on updating the frame rate and bitrate calculations, printing key stats like frames decoded, bytes received, and timestamp. This ongoing vigilance helps in tracking performance effectively.

回答

要在您的 ClientActivity4 类中增加打印远程视频帧率和码率的日志,以便更好地调试和监控这些指标,可以按照以下步骤进行操作:

  1. 修改状态变量为 Compose 可观察的状态
  2. parseStatsReport 函数中添加日志打印
  3. 确保 Compose UI 正确观察和显示这些状态

下面是详细的步骤和代码示例:


1. 修改状态变量为 Compose 可观察的状态

首先,确保您的状态变量使用 mutableStateOf 来使 Compose 能够观察到它们的变化并自动重新组合 UI。

修改前:

kotlin
// State variables for frame rate and bitrate private val frameRateState = mutableDoubleStateOf(0.0) private val bitrateState = mutableLongStateOf(0L)

修改后:

kotlin
import androidx.compose.runtime.getValue import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.setValue // State variables for frame rate and bitrate private var frameRate by mutableStateOf(0.0) private var bitrate by mutableStateOf(0L)

2. 在 parseStatsReport 函数中添加日志打印

在解析统计报告的过程中,添加日志打印语句以输出当前的帧率和码率。

修改后的 parseStatsReport 函数:

kotlin
private fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // 转换为秒 Log.d(TAG, "Frames Decoded: $framesDecoded, Bytes Received: $bytesReceived, Timestamp: $timestamp") if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // 位每秒 Log.d(TAG, "Calculated Frame Rate: $currentFrameRate fps, Bitrate: $currentBitrate bps") // 在主线程更新状态 lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // 更新之前的值 prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } }

解释:

  • 日志打印 framesDecodedbytesReceivedtimestamp 这些日志帮助您确认统计数据是否正确获取。
  • 计算后的帧率和码率也通过日志打印出来: 这有助于验证计算逻辑是否正确。

3. 确保 Compose UI 正确观察和显示这些状态

确保您的 Compose 布局正确地观察到这些状态变化并在 UI 上显示。

修改后的 WebRTCComposeLayout 函数:

kotlin
@Composable fun WebRTCComposeLayout() { val context = LocalContext.current var peerConnectionFactory: PeerConnectionFactory? = null var localVideoTrack: VideoTrack? by remember { mutableStateOf(null) } Surface(color = Color.Black) { Box(modifier = Modifier.fillMaxSize()) { // Remote Video View AndroidView( factory = { remoteView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(false) } remoteView!! }, modifier = Modifier.fillMaxSize(), update = { if (remoteEglBase?.eglBaseContext == null) { remoteEglBase = EglBase.create() it.init(remoteEglBase!!.eglBaseContext, null) it.setMirror(false) } } ) // Local Video View AndroidView( factory = { localView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(true) } localView!! }, modifier = Modifier .size(100.dp, 150.dp) .align(Alignment.BottomEnd) .padding(16.dp), update = { if (localEglBase == null) { localEglBase = EglBase.create() it.init(localEglBase!!.eglBaseContext, null) it.setMirror(true) } } ) // 显示帧率和码率 Text( text = "帧率: ${frameRate.roundToInt()} fps\n码率: ${bitrate / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // 初始化 PeerConnectionFactory 和创建 PeerConnection LaunchedEffect(Unit) { val options = PeerConnectionFactory.InitializationOptions.builder(context) .createInitializationOptions() PeerConnectionFactory.initialize(options) val encoderFactory = DefaultVideoEncoderFactory( localEglBase!!.eglBaseContext, true, true ) val decoderFactory = DefaultVideoDecoderFactory(remoteEglBase!!.eglBaseContext) peerConnectionFactory = PeerConnectionFactory.builder() .setVideoEncoderFactory(encoderFactory) .setVideoDecoderFactory(decoderFactory) .createPeerConnectionFactory() initLocalVideo(context, localView, peerConnectionFactory!!, localEglBase!!) { localVideoTrack = it } createPeerConnection( context, peerConnectionFactory, localVideoTrack, remoteView!! ) { localPeer = it localPeerLiveData.postValue(it) } } } } }

解释:

  • Text Composable: 这里显示了当前的帧率和码率。使用 frameRatebitrate 状态变量,这些变量会在 parseStatsReport 中更新。
  • 确保状态变量已正确绑定: 使用 by mutableStateOf 确保 Compose 能观察到状态变化并自动更新 UI。

4. 验证日志输出

确保您的日志输出级别设置为 DEBUG 或更低,以便能够看到 Log.d 的输出。在 Android Studio 的 Logcat 中,您可以通过以下步骤进行验证:

  1. 打开 Logcat: 在 Android Studio 中,点击底部的 Logcat 标签。
  2. 设置过滤器: 选择您的应用进程,并将日志级别设置为 DEBUG 或更低。
  3. 查看日志: 运行应用并观察 Logcat 中是否出现类似如下的日志输出:
    text
    D/WebRTC-Client: Frames Decoded: 120.0, Bytes Received: 1500000.0, Timestamp: 162514.0 D/WebRTC-Client: Calculated Frame Rate: 30.0 fps, Bitrate: 1200000 bps

5. 完整的修改代码示例

为了方便起见,以下是包括上述所有修改的完整代码片段:

kotlin
import android.Manifest import android.content.Context import android.content.pm.PackageManager import android.os.Bundle import android.util.Log import androidx.activity.ComponentActivity import androidx.activity.compose.setContent import androidx.compose.foundation.layout.* import androidx.compose.material3.Text import androidx.compose.runtime.* import androidx.compose.ui.Alignment import androidx.compose.ui.Modifier import androidx.compose.ui.graphics.Color import androidx.compose.ui.unit.dp import androidx.lifecycle.lifecycleScope import io.socket.client.IO import io.socket.client.Socket import org.json.JSONObject import org.webrtc.* import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Job import kotlinx.coroutines.delay import kotlinx.coroutines.launch import kotlin.math.roundToInt class ClientActivity4 : ComponentActivity() { private lateinit var socket: Socket private var localPeer: PeerConnection? = null private var localView: SurfaceViewRenderer? = null private var remoteView: SurfaceViewRenderer? = null private var localEglBase: EglBase? = null private var remoteEglBase: EglBase? = null private val pendingIceCandidates = mutableListOf<IceCandidate>() private var currentRoom: String? = null private val localPeerLiveData = MutableLiveData<PeerConnection?>(null) private lateinit var signalingServerUrl: String private lateinit var stunUrl: String private lateinit var turnUrl: String private lateinit var turnUsername: String private lateinit var turnPassword: String private val TAG: String = "WebRTC-Client" // State variables for frame rate and bitrate private var frameRate by mutableStateOf(0.0) private var bitrate by mutableStateOf(0L) // Variables to track previous stats for calculations private var prevFramesDecoded = 0.0 private var prevBytesReceived = 0.0 private var prevTimestamp = 0.0 // Job for stats collection coroutine private var statsJob: Job? = null override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) // 设置角色和房间 currentRoom = intent.getStringExtra("room") ?: "default-room" signalingServerUrl = intent.getStringExtra("signalingServerUrl") ?: "https://wstszx.us.kg" stunUrl = intent.getStringExtra("stunUrl") ?: "stun:stun.wstszx.us.kg:3478" turnUrl = intent.getStringExtra("turnUrl") ?: "turn:turn.wstszx.us.kg:5349?transport=udp" turnUsername = intent.getStringExtra("turnUsername") ?: "wstszx" turnPassword = intent.getStringExtra("turnPassword") ?: "930379" Log.d(TAG, "onCreate: 角色 = 客户端, 房间 = $currentRoom, 信令服务器 = $signalingServerUrl, STUN URL = $stunUrl, TURN URL = $turnUrl") initializeSocketIO() setContent { WebRTCComposeLayout() } requestPermissionsIfNeeded() } private fun initializeSocketIO() { val protocol = if (signalingServerUrl.startsWith("https")) "https" else "http" val socketUrl = signalingServerUrl val options = IO.Options().apply { transports = arrayOf("websocket") secure = protocol == "https" path = "/socket.io/" } try { socket = IO.socket(socketUrl, options) socket.on(Socket.EVENT_CONNECT) { Log.d(TAG, "Socket 已连接") socket.emit("join", currentRoom) Log.d(TAG, "已加入房间: $currentRoom") initiateCall() } socket.on(Socket.EVENT_CONNECT_ERROR) { args -> if (args.isNotEmpty()) { val error = args[0] Log.e(TAG, "Socket 连接错误: $error") } } socket.on(Socket.EVENT_DISCONNECT) { args -> if (args.isNotEmpty()) { val reason = args[0] Log.d(TAG, "Socket 已断开: $reason") } } socket.on("signal") { args -> Log.d(TAG, "收到信令: ${args[0]}") if (args.isNotEmpty() && args[0] is JSONObject) { val data = args[0] as JSONObject handleSignalingData(data) } } socket.connect() Log.d(TAG, "正在连接到 Socket: $socketUrl...") } catch (e: Exception) { Log.e(TAG, "连接 Socket 时出错: ${e.message}") } } private fun requestPermissionsIfNeeded() { if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.INTERNET) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.ACCESS_NETWORK_STATE) != PackageManager.PERMISSION_GRANTED ) { requestPermissions( arrayOf( Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO, Manifest.permission.INTERNET, Manifest.permission.ACCESS_NETWORK_STATE ), 1 ) } } @Composable fun WebRTCComposeLayout() { val context = LocalContext.current var peerConnectionFactory: PeerConnectionFactory? = null var localVideoTrack: VideoTrack? by remember { mutableStateOf(null) } Surface(color = Color.Black) { Box(modifier = Modifier.fillMaxSize()) { // Remote Video View AndroidView( factory = { remoteView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(false) } remoteView!! }, modifier = Modifier.fillMaxSize(), update = { if (remoteEglBase?.eglBaseContext == null) { remoteEglBase = EglBase.create() it.init(remoteEglBase!!.eglBaseContext, null) it.setMirror(false) } } ) // Local Video View AndroidView( factory = { localView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(true) } localView!! }, modifier = Modifier .size(100.dp, 150.dp) .align(Alignment.BottomEnd) .padding(16.dp), update = { if (localEglBase == null) { localEglBase = EglBase.create() it.init(localEglBase!!.eglBaseContext, null) it.setMirror(true) } } ) // 显示帧率和码率 Text( text = "帧率: ${frameRate.roundToInt()} fps\n码率: ${bitrate / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // 初始化 PeerConnectionFactory 和创建 PeerConnection LaunchedEffect(Unit) { val options = PeerConnectionFactory.InitializationOptions.builder(context) .createInitializationOptions() PeerConnectionFactory.initialize(options) val encoderFactory = DefaultVideoEncoderFactory( localEglBase!!.eglBaseContext, true, true ) val decoderFactory = DefaultVideoDecoderFactory(remoteEglBase!!.eglBaseContext) peerConnectionFactory = PeerConnectionFactory.builder() .setVideoEncoderFactory(encoderFactory) .setVideoDecoderFactory(decoderFactory) .createPeerConnectionFactory() initLocalVideo(context, localView, peerConnectionFactory!!, localEglBase!!) { localVideoTrack = it } createPeerConnection( context, peerConnectionFactory, localVideoTrack, remoteView!! ) { localPeer = it localPeerLiveData.postValue(it) } } } } } private fun initLocalVideo( context: Context, localView: SurfaceViewRenderer?, peerConnectionFactory: PeerConnectionFactory, eglBase: EglBase, onLocalVideoTrack: (VideoTrack) -> Unit ) { val videoCapturer = createCameraCapturer(context) val surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBase.eglBaseContext) val videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast) videoCapturer.initialize(surfaceTextureHelper, context, videoSource.capturerObserver) videoCapturer.startCapture(1920, 1080, 60) val localVideoTrack = peerConnectionFactory.createVideoTrack("video_track", videoSource) localVideoTrack.addSink(localView) val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints()) val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource) // 添加音视频轨道到本地流 val mediaStream = peerConnectionFactory.createLocalMediaStream("local_stream") mediaStream.addTrack(localAudioTrack) mediaStream.addTrack(localVideoTrack) onLocalVideoTrack(localVideoTrack) } private fun createCameraCapturer(context: Context): CameraVideoCapturer { val camera2Enumerator = Camera2Enumerator(context) val deviceNames = camera2Enumerator.deviceNames for (deviceName in deviceNames) { if (camera2Enumerator.isFrontFacing(deviceName)) { return camera2Enumerator.createCapturer(deviceName, null) ?: throw IllegalStateException("无法创建前置摄像头捕获器") } } // 如果没有前置摄像头,则使用第一个摄像头 return camera2Enumerator.createCapturer(deviceNames[0], null) ?: throw IllegalStateException("无法创建摄像头捕获器") } private fun createPeerConnection( context: Context, peerConnectionFactory: PeerConnectionFactory, localVideoTrack: VideoTrack?, remoteView: SurfaceViewRenderer, onLocalPeerCreated: (PeerConnection) -> Unit ) { val iceServers = listOf( PeerConnection.IceServer.builder(stunUrl).createIceServer(), PeerConnection.IceServer.builder(turnUrl) .setUsername(turnUsername) .setPassword(turnPassword) .createIceServer() ) val rtcConfig = PeerConnection.RTCConfiguration(iceServers).apply { bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN } val localPeer = peerConnectionFactory.createPeerConnection(rtcConfig, object : PeerConnection.Observer { override fun onIceCandidate(iceCandidate: IceCandidate?) { iceCandidate?.let { Log.d(TAG, "ICE candidate: $it") val signalData = JSONObject().apply { put("type", "ice") put("candidate", JSONObject().apply { put("sdpMid", it.sdpMid) put("sdpMLineIndex", it.sdpMLineIndex) put("candidate", it.sdp) }) put("room", currentRoom) } socket.emit("signal", signalData) } } override fun onIceCandidatesRemoved(candidates: Array<out IceCandidate>?) { Log.d(TAG, "ICE candidates removed") } override fun onSignalingChange(newState: PeerConnection.SignalingState?) { Log.d(TAG, "Signaling state changed to: $newState") } override fun onIceConnectionChange(newState: PeerConnection.IceConnectionState?) { Log.d(TAG, "ICE connection state changed to: $newState") } override fun onIceConnectionReceivingChange(receiving: Boolean) { Log.d(TAG, "ICE connection receiving change: $receiving") } override fun onIceGatheringChange(newState: PeerConnection.IceGatheringState?) { Log.d(TAG, "ICE gathering state changed to: $newState") } override fun onAddStream(stream: MediaStream?) { Log.d(TAG, "Stream added") } override fun onRemoveStream(stream: MediaStream?) { Log.d(TAG, "Stream removed") } override fun onDataChannel(dataChannel: DataChannel?) { Log.d(TAG, "Data channel created") } override fun onRenegotiationNeeded() { Log.d(TAG, "Renegotiation needed") } override fun onAddTrack(receiver: RtpReceiver?, streams: Array<out MediaStream>?) { Log.d(TAG, "Track added") receiver?.track()?.let { track -> if (track is VideoTrack) { track.addSink(remoteView) } } } override fun onTrack(transceiver: RtpTransceiver?) { Log.d(TAG, "onTrack called") transceiver?.receiver?.track()?.let { track -> if (track is VideoTrack) { track.addSink(remoteView) } } } override fun onConnectionChange(newState: PeerConnection.PeerConnectionState?) { Log.d(TAG, "Connection state changed to: $newState") } }) localVideoTrack?.let { localPeer?.addTrack(it, listOf("local_stream")) } val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints()) val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource) localPeer?.addTrack(localAudioTrack, listOf("local_stream")) onLocalPeerCreated(localPeer!!) // 开始收集统计数据 startStatsCollection() } private fun initiateCall() { Log.d(TAG, "Initiating call...") val constraints = MediaConstraints().apply { mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")) mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true")) } localPeer?.createOffer(object : SdpObserver { override fun onCreateSuccess(sessionDescription: SessionDescription?) { sessionDescription?.let { sdp -> localPeer?.setLocalDescription(object : SdpObserver { override fun onSetSuccess() { val signalData = JSONObject().apply { put("type", "offer") put("sdp", JSONObject().put("sdp", sdp.description)) put("room", currentRoom) } socket.emit("signal", signalData) } override fun onSetFailure(error: String?) { Log.e(TAG, "Set local description error: $error") } override fun onCreateSuccess(p0: SessionDescription?) {} override fun onCreateFailure(p0: String?) {} }, sdp) } } override fun onSetSuccess() {} override fun onCreateFailure(error: String?) { Log.e(TAG, "Create offer error: $error") } override fun onSetFailure(error: String?) {} }, constraints) } private fun handleSignalingData(data: JSONObject) { Log.d(TAG, "Handling signaling data: $data") when (data.getString("type")) { "answer" -> { Log.d(TAG, "Received answer") val sdp = SessionDescription( SessionDescription.Type.ANSWER, data.getJSONObject("sdp").getString("sdp") ) localPeer?.setRemoteDescription(object : SdpObserver { override fun onSetSuccess() { pendingIceCandidates.forEach { candidate -> localPeer?.addIceCandidate(candidate) } pendingIceCandidates.clear() Log.d(TAG, "Set remote description (answer) success") } override fun onSetFailure(error: String?) { Log.e(TAG, "Set remote description error: $error") } override fun onCreateSuccess(p0: SessionDescription?) {} override fun onCreateFailure(p0: String?) {} }, sdp) } "ice" -> { Log.d(TAG, "Received ICE candidate") val candidateData = data.getJSONObject("candidate") val candidate = IceCandidate( candidateData.getString("sdpMid"), candidateData.getInt("sdpMLineIndex"), candidateData.getString("candidate") ) if (localPeer?.remoteDescription != null) { localPeer?.addIceCandidate(candidate) } else { pendingIceCandidates.add(candidate) } } else -> { Log.e(TAG, "Unknown signaling type: ${data.getString("type")}") } } } private fun startStatsCollection() { statsJob = lifecycleScope.launch(Dispatchers.Default) { while (isActive) { delay(1000) // 每秒收集一次统计数据 localPeer?.getStats { report -> parseStatsReport(report) } } } } private fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // 转换为秒 Log.d(TAG, "Frames Decoded: $framesDecoded, Bytes Received: $bytesReceived, Timestamp: $timestamp") if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // 位每秒 Log.d(TAG, "Calculated Frame Rate: $currentFrameRate fps, Bitrate: $currentBitrate bps") // 在主线程更新状态 lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // 更新之前的值 prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } } override fun onDestroy() { super.onDestroy() statsJob?.cancel() socket.disconnect() localPeer?.dispose() localView?.release() remoteView?.release() localEglBase?.release() remoteEglBase?.release() } }

6. 运行和验证

  1. 运行应用: 确保应用能够成功连接并建立 WebRTC 连接。
  2. 查看 Logcat: 打开 Android Studio 的 Logcat,过滤 TAG 为 WebRTC-Client
  3. 验证日志输出: 确保您可以看到类似如下的日志输出:
    text
    D/WebRTC-Client: Frames Decoded: 120.0, Bytes Received: 1500000.0, Timestamp: 162514.0 D/WebRTC-Client: Calculated Frame Rate: 30.0 fps, Bitrate: 1200000 bps
  4. 检查 UI 显示: 在应用的 UI 上,您应该能够看到实时更新的帧率和码率。

7. 其他建议

  • 权限检查: 确保应用在运行时已获得所有必要的权限(摄像头、麦克风、网络访问等)。
  • 错误处理: 添加更多的错误处理和日志打印,以便更好地调试潜在的问题。
  • 优化统计收集频率: 根据需要调整统计数据收集的频率(当前设置为每秒一次)。

通过以上步骤,您应该能够成功地打印和显示远程视频的帧率和码率,从而更好地监控 WebRTC 连接的性能。如果在实施过程中遇到任何问题,请随时向我寻求进一步的帮助!

分享这个问答