WebRTC视频帧率比特率问题
Créé le : 1 novembre 2024
/**
 * WebRTC client Activity: joins a signaling room over Socket.IO, sends an SDP offer,
 * renders local + remote video with Compose-hosted [SurfaceViewRenderer]s, and
 * periodically polls [PeerConnection.getStats] to display remote video frame rate
 * and bitrate on screen.
 */
class ClientActivity4 : ComponentActivity() {
    private lateinit var socket: Socket
    private var localPeer: PeerConnection? = null
    private var localView: SurfaceViewRenderer? = null
    private var remoteView: SurfaceViewRenderer? = null
    private var localEglBase: EglBase? = null
    private var remoteEglBase: EglBase? = null
    // ICE candidates received before the remote description is set; flushed afterwards.
    private val pendingIceCandidates = mutableListOf<IceCandidate>()
    private var currentRoom: String? = null
    private val localPeerLiveData = MutableLiveData<PeerConnection?>(null)
    private lateinit var signalingServerUrl: String
    private lateinit var stunUrl: String
    private lateinit var turnUrl: String
    private lateinit var turnUsername: String
    private lateinit var turnPassword: String
    private val TAG: String = "WebRTC-Client"

    // State variables for frame rate and bitrate (read by the Compose overlay Text).
    private val frameRateState = mutableDoubleStateOf(0.0)
    private val bitrateState = mutableLongStateOf(0L)

    // Variables to track previous stats for delta-based calculations.
    private var prevFramesDecoded = 0.0
    private var prevBytesReceived = 0.0
    private var prevTimestamp = 0.0

    // Job for stats collection coroutine; cancelled in onDestroy.
    private var statsJob: Job? = null

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Configure role and room from the launching Intent, with hard-coded fallbacks.
        currentRoom = intent.getStringExtra("room") ?: "default-room"
        signalingServerUrl = intent.getStringExtra("signalingServerUrl") ?: "https://wstszx.us.kg"
        stunUrl = intent.getStringExtra("stunUrl") ?: "stun:stun.wstszx.us.kg:3478"
        turnUrl = intent.getStringExtra("turnUrl") ?: "turn:turn.wstszx.us.kg:5349?transport=udp"
        turnUsername = intent.getStringExtra("turnUsername") ?: "wstszx"
        turnPassword = intent.getStringExtra("turnPassword") ?: "930379"
        Log.d(TAG, "onCreate: 角色 = 客户端, 房间 = $currentRoom, 信令服务器 = $signalingServerUrl, STUN URL = $stunUrl, TURN URL = $turnUrl")
        // NOTE(review): the socket is connected before the Compose tree (and thus the
        // PeerConnection) is created — see the ordering note in initializeSocketIO.
        initializeSocketIO()
        setContent { WebRTCComposeLayout() }
        requestPermissionsIfNeeded()
    }

    /**
     * Connects to the Socket.IO signaling server, joins [currentRoom] on connect,
     * and dispatches incoming "signal" payloads to [handleSignalingData].
     */
    private fun initializeSocketIO() {
        val protocol = if (signalingServerUrl.startsWith("https")) "https" else "http"
        val socketUrl = signalingServerUrl
        val options = IO.Options().apply {
            transports = arrayOf("websocket")
            secure = protocol == "https"
            path = "/socket.io/"
        }
        try {
            socket = IO.socket(socketUrl, options)
            socket.on(Socket.EVENT_CONNECT) {
                Log.d(TAG, "Socket 已连接")
                socket.emit("join", currentRoom)
                Log.d(TAG, "已加入房间: $currentRoom")
                // NOTE(review): localPeer may still be null here — it is only created
                // later inside WebRTCComposeLayout's LaunchedEffect. In that case
                // initiateCall's `localPeer?.createOffer` is silently skipped and no
                // offer is ever sent (a likely reason no remote stats appear). Confirm
                // the ordering between socket connect and PeerConnection creation.
                initiateCall()
            }
            socket.on(Socket.EVENT_CONNECT_ERROR) { args ->
                if (args.isNotEmpty()) {
                    val error = args[0]
                    Log.e(TAG, "Socket 连接错误: $error")
                }
            }
            socket.on(Socket.EVENT_DISCONNECT) { args ->
                if (args.isNotEmpty()) {
                    val reason = args[0]
                    Log.d(TAG, "Socket 已断开: $reason")
                }
            }
            socket.on("signal") { args ->
                Log.d(TAG, "收到信令: ${args[0]}")
                if (args.isNotEmpty() && args[0] is JSONObject) {
                    val data = args[0] as JSONObject
                    handleSignalingData(data)
                }
            }
            socket.connect()
            Log.d(TAG, "正在连接到 Socket: $socketUrl...")
        } catch (e: Exception) {
            Log.e(TAG, "连接 Socket 时出错: ${e.message}")
        }
    }

    /**
     * Requests runtime permissions if any are missing.
     * NOTE(review): INTERNET and ACCESS_NETWORK_STATE are normal (install-time)
     * permissions; requesting them at runtime is harmless but unnecessary — verify.
     */
    private fun requestPermissionsIfNeeded() {
        if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED ||
            checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED ||
            checkSelfPermission(Manifest.permission.INTERNET) != PackageManager.PERMISSION_GRANTED ||
            checkSelfPermission(Manifest.permission.ACCESS_NETWORK_STATE) != PackageManager.PERMISSION_GRANTED
        ) {
            requestPermissions(
                arrayOf(
                    Manifest.permission.CAMERA,
                    Manifest.permission.RECORD_AUDIO,
                    Manifest.permission.INTERNET,
                    Manifest.permission.ACCESS_NETWORK_STATE
                ),
                1
            )
        }
    }

    /**
     * Root Compose UI: full-screen remote video, a small local-video preview,
     * a stats overlay Text, and a LaunchedEffect that bootstraps the
     * PeerConnectionFactory, local capture, and the PeerConnection.
     */
    @Composable
    fun WebRTCComposeLayout() {
        val context = LocalContext.current
        // NOTE(review): plain local (not remember{}) — recomposition would reset this
        // reference; it survives here only because LaunchedEffect(Unit) runs once.
        var peerConnectionFactory: PeerConnectionFactory? = null
        var localVideoTrack: VideoTrack? by remember { mutableStateOf(null) }
        Surface(color = Color.Black) {
            Box(modifier = Modifier.fillMaxSize()) {
                // Remote video view (full screen, behind the local preview).
                AndroidView(
                    factory = {
                        remoteView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(false) }
                        remoteView!!
                    },
                    modifier = Modifier.fillMaxSize(),
                    update = {
                        // Lazily create the EGL context and init the renderer exactly once.
                        if (remoteEglBase?.eglBaseContext == null) {
                            remoteEglBase = EglBase.create()
                            it.init(remoteEglBase!!.eglBaseContext, null)
                            it.setMirror(false)
                        }
                    }
                )
                // Local video preview (small, bottom-right, mirrored).
                AndroidView(
                    factory = {
                        localView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(true) }
                        localView!!
                    },
                    modifier = Modifier
                        .size(100.dp, 150.dp)
                        .align(Alignment.BottomEnd)
                        .padding(16.dp),
                    update = {
                        if (localEglBase == null) {
                            localEglBase = EglBase.create()
                            it.init(localEglBase!!.eglBaseContext, null)
                            it.setMirror(true)
                        }
                    }
                )
                // Display frame rate and bitrate, updated from parseStatsReport.
                Text(
                    text = "Frame Rate: ${frameRateState.doubleValue.roundToInt()} fps\nBitrate: ${bitrateState.longValue / 1000} kbps",
                    color = Color.White,
                    modifier = Modifier
                        .align(Alignment.TopStart)
                        .padding(16.dp)
                )
                // One-shot bootstrap: initialize PeerConnectionFactory, start local
                // capture, then create the PeerConnection.
                LaunchedEffect(Unit) {
                    val options = PeerConnectionFactory.InitializationOptions.builder(context)
                        .createInitializationOptions()
                    PeerConnectionFactory.initialize(options)
                    // NOTE(review): localEglBase!!/remoteEglBase!! assume both AndroidView
                    // update lambdas already ran before this effect — TODO confirm; a
                    // race here would throw NPE on first composition.
                    val encoderFactory = DefaultVideoEncoderFactory(
                        localEglBase!!.eglBaseContext, true, true
                    )
                    val decoderFactory = DefaultVideoDecoderFactory(remoteEglBase!!.eglBaseContext)
                    peerConnectionFactory = PeerConnectionFactory.builder()
                        .setVideoEncoderFactory(encoderFactory)
                        .setVideoDecoderFactory(decoderFactory)
                        .createPeerConnectionFactory()
                    initLocalVideo(context, localView, peerConnectionFactory!!, localEglBase!!) {
                        localVideoTrack = it
                    }
                    createPeerConnection(
                        context, peerConnectionFactory, localVideoTrack, remoteView!!
                    ) {
                        localPeer = it
                        localPeerLiveData.postValue(it)
                    }
                }
            }
        }
    }

    /**
     * Starts camera capture, creates local video/audio tracks, attaches the video
     * track to [localView], and hands the video track back via [onLocalVideoTrack].
     */
    private fun initLocalVideo(
        context: Context,
        localView: SurfaceViewRenderer?,
        peerConnectionFactory: PeerConnectionFactory,
        eglBase: EglBase,
        onLocalVideoTrack: (VideoTrack) -> Unit
    ) {
        val videoCapturer = createCameraCapturer(context)
        val surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBase.eglBaseContext)
        val videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast)
        videoCapturer.initialize(surfaceTextureHelper, context, videoSource.capturerObserver)
        // NOTE(review): 1080p@60 may not be supported on all cameras — the capturer
        // typically falls back to the closest supported format; verify on target devices.
        videoCapturer.startCapture(1920, 1080, 60)
        val localVideoTrack = peerConnectionFactory.createVideoTrack("video_track", videoSource)
        localVideoTrack.addSink(localView)
        val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints())
        val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource)
        // Add audio/video tracks to a local MediaStream.
        // NOTE(review): this stream is never attached to the PeerConnection; tracks are
        // added separately in createPeerConnection (Unified Plan) — confirm it is needed.
        val mediaStream = peerConnectionFactory.createLocalMediaStream("local_stream")
        mediaStream.addTrack(localAudioTrack)
        mediaStream.addTrack(localVideoTrack)
        onLocalVideoTrack(localVideoTrack)
    }

    /**
     * Creates a [CameraVideoCapturer], preferring a front-facing camera and
     * falling back to the first enumerated device.
     * @throws IllegalStateException if no capturer can be created.
     */
    private fun createCameraCapturer(context: Context): CameraVideoCapturer {
        val camera2Enumerator = Camera2Enumerator(context)
        val deviceNames = camera2Enumerator.deviceNames
        for (deviceName in deviceNames) {
            if (camera2Enumerator.isFrontFacing(deviceName)) {
                return camera2Enumerator.createCapturer(deviceName, null)
                    ?: throw IllegalStateException("无法创建前置摄像头捕获器")
            }
        }
        // If there is no front camera, use the first enumerated camera.
        return camera2Enumerator.createCapturer(deviceNames[0], null)
            ?: throw IllegalStateException("无法创建摄像头捕获器")
    }

    /**
     * Builds the [PeerConnection] (Unified Plan, STUN + TURN), wires the observer
     * that forwards ICE candidates over the socket and sinks remote video into
     * [remoteView], adds the local tracks, then starts stats collection.
     */
    private fun createPeerConnection(
        context: Context,
        peerConnectionFactory: PeerConnectionFactory,
        localVideoTrack: VideoTrack?,
        remoteView: SurfaceViewRenderer,
        onLocalPeerCreated: (PeerConnection) -> Unit
    ) {
        val iceServers = listOf(
            PeerConnection.IceServer.builder(stunUrl).createIceServer(),
            PeerConnection.IceServer.builder(turnUrl)
                .setUsername(turnUsername)
                .setPassword(turnPassword)
                .createIceServer()
        )
        val rtcConfig = PeerConnection.RTCConfiguration(iceServers).apply {
            bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE
            rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE
            tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED
            continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY
            sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN
        }
        val localPeer = peerConnectionFactory.createPeerConnection(rtcConfig, object : PeerConnection.Observer {
            override fun onIceCandidate(iceCandidate: IceCandidate?) {
                iceCandidate?.let {
                    Log.d(TAG, "ICE candidate: $it")
                    // Relay each local candidate to the peer through the signaling server.
                    val signalData = JSONObject().apply {
                        put("type", "ice")
                        put("candidate", JSONObject().apply {
                            put("sdpMid", it.sdpMid)
                            put("sdpMLineIndex", it.sdpMLineIndex)
                            put("candidate", it.sdp)
                        })
                        put("room", currentRoom)
                    }
                    socket.emit("signal", signalData)
                }
            }

            override fun onIceCandidatesRemoved(candidates: Array<out IceCandidate>?) {
                Log.d(TAG, "ICE candidates removed")
            }

            override fun onSignalingChange(newState: PeerConnection.SignalingState?) {
                Log.d(TAG, "Signaling state changed to: $newState")
            }

            override fun onIceConnectionChange(newState: PeerConnection.IceConnectionState?) {
                Log.d(TAG, "ICE connection state changed to: $newState")
            }

            override fun onIceConnectionReceivingChange(receiving: Boolean) {
                Log.d(TAG, "ICE connection receiving change: $receiving")
            }

            override fun onIceGatheringChange(newState: PeerConnection.IceGatheringState?) {
                Log.d(TAG, "ICE gathering state changed to: $newState")
            }

            override fun onAddStream(stream: MediaStream?) {
                Log.d(TAG, "Stream added")
            }

            override fun onRemoveStream(stream: MediaStream?) {
                Log.d(TAG, "Stream removed")
            }

            override fun onDataChannel(dataChannel: DataChannel?) {
                Log.d(TAG, "Data channel created")
            }

            override fun onRenegotiationNeeded() {
                Log.d(TAG, "Renegotiation needed")
            }

            override fun onAddTrack(receiver: RtpReceiver?, streams: Array<out MediaStream>?) {
                Log.d(TAG, "Track added")
                // Sink incoming remote video into the full-screen renderer.
                receiver?.track()?.let { track ->
                    if (track is VideoTrack) {
                        track.addSink(remoteView)
                    }
                }
            }

            override fun onTrack(transceiver: RtpTransceiver?) {
                Log.d(TAG, "onTrack called")
                transceiver?.receiver?.track()?.let { track ->
                    if (track is VideoTrack) {
                        track.addSink(remoteView)
                    }
                }
            }

            override fun onConnectionChange(newState: PeerConnection.PeerConnectionState?) {
                Log.d(TAG, "Connection state changed to: $newState")
            }
        })
        // NOTE(review): localVideoTrack is null if the composable's state hadn't been
        // set when this was called — then no video track is ever sent. Verify.
        localVideoTrack?.let { localPeer?.addTrack(it, listOf("local_stream")) }
        // NOTE(review): a second audio source/track is created here on top of the one
        // from initLocalVideo, with the same "audio_track" id — confirm this is intended.
        val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints())
        val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource)
        localPeer?.addTrack(localAudioTrack, listOf("local_stream"))
        onLocalPeerCreated(localPeer!!)
        // Start collecting stats after the peer connection is created.
        startStatsCollection()
    }

    /**
     * Creates an SDP offer, sets it as the local description, and emits it to the
     * room over the signaling socket. No-ops silently when [localPeer] is null.
     */
    private fun initiateCall() {
        Log.d(TAG, "Initiating call...")
        val constraints = MediaConstraints().apply {
            mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
            mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
        }
        localPeer?.createOffer(object : SdpObserver {
            override fun onCreateSuccess(sessionDescription: SessionDescription?) {
                sessionDescription?.let { sdp ->
                    localPeer?.setLocalDescription(object : SdpObserver {
                        override fun onSetSuccess() {
                            // Only send the offer once the local description is applied.
                            val signalData = JSONObject().apply {
                                put("type", "offer")
                                put("sdp", JSONObject().put("sdp", sdp.description))
                                put("room", currentRoom)
                            }
                            socket.emit("signal", signalData)
                        }

                        override fun onSetFailure(error: String?) {
                            Log.e(TAG, "Set local description error: $error")
                        }

                        override fun onCreateSuccess(p0: SessionDescription?) {}
                        override fun onCreateFailure(p0: String?) {}
                    }, sdp)
                }
            }

            override fun onSetSuccess() {}

            override fun onCreateFailure(error: String?) {
                Log.e(TAG, "Create offer error: $error")
            }

            override fun onSetFailure(error: String?) {}
        }, constraints)
    }

    /**
     * Handles incoming signaling payloads: applies "answer" as the remote
     * description (then flushes queued ICE candidates), and adds or queues
     * "ice" candidates depending on whether the remote description is set.
     */
    private fun handleSignalingData(data: JSONObject) {
        Log.d(TAG, "Handling signaling data: $data")
        when (data.getString("type")) {
            "answer" -> {
                Log.d(TAG, "Received answer")
                val sdp = SessionDescription(
                    SessionDescription.Type.ANSWER,
                    data.getJSONObject("sdp").getString("sdp")
                )
                localPeer?.setRemoteDescription(object : SdpObserver {
                    override fun onSetSuccess() {
                        // Flush candidates that arrived before the answer.
                        pendingIceCandidates.forEach { candidate ->
                            localPeer?.addIceCandidate(candidate)
                        }
                        pendingIceCandidates.clear()
                        Log.d(TAG, "Set remote description (answer) success")
                    }

                    override fun onSetFailure(error: String?) {
                        Log.e(TAG, "Set remote description error: $error")
                    }

                    override fun onCreateSuccess(p0: SessionDescription?) {}
                    override fun onCreateFailure(p0: String?) {}
                }, sdp)
            }
            "ice" -> {
                Log.d(TAG, "Received ICE candidate")
                val candidateData = data.getJSONObject("candidate")
                val candidate = IceCandidate(
                    candidateData.getString("sdpMid"),
                    candidateData.getInt("sdpMLineIndex"),
                    candidateData.getString("candidate")
                )
                if (localPeer?.remoteDescription != null) {
                    localPeer?.addIceCandidate(candidate)
                } else {
                    // Queue until the remote description is applied.
                    pendingIceCandidates.add(candidate)
                }
            }
            else -> {
                Log.e(TAG, "Unknown signaling type: ${data.getString("type")}")
            }
        }
    }

    /**
     * Launches a background loop that polls [PeerConnection.getStats] once per
     * second for as long as the lifecycle scope is active.
     */
    private fun startStatsCollection() {
        statsJob = lifecycleScope.launch(Dispatchers.Default) {
            while (isActive) {
                delay(1000) // Collect stats every 1 second
                localPeer?.getStats { report ->
                    parseStatsReport(report)
                }
            }
        }
    }

    /**
     * Extracts framesDecoded/bytesReceived from the "inbound-rtp" video entry and
     * converts the deltas since the previous poll into fps and bits-per-second,
     * publishing the results to the Compose state on the main thread.
     */
    private fun parseStatsReport(report: RTCStatsReport) {
        for (stats in report.statsMap.values) {
            if (stats.type == "inbound-rtp") {
                val kind = stats.members["kind"] as? String
                if (kind == "video") {
                    // NOTE(review): depending on the WebRTC build, these members may be
                    // reported as Long/BigInteger rather than Double, in which case the
                    // `as? Double` casts always yield 0.0 and the overlay never updates —
                    // verify the actual member types in the stats report.
                    val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0
                    val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0
                    val timestamp = stats.timestampUs / 1_000_000.0 // Convert to seconds
                    if (prevTimestamp != 0.0) {
                        val timeElapsed = timestamp - prevTimestamp
                        val framesDelta = framesDecoded - prevFramesDecoded
                        val bytesDelta = bytesReceived - prevBytesReceived
                        val frameRate = framesDelta / timeElapsed
                        val bitrate = (bytesDelta * 8) / timeElapsed // in bits per second
                        // Update the state on the main thread.
                        lifecycleScope.launch(Dispatchers.Main) {
                            frameRateState.doubleValue = frameRate
                            bitrateState.longValue = bitrate.toLong()
                        }
                    }
                    // Update previous values for the next delta computation.
                    prevFramesDecoded = framesDecoded
                    prevBytesReceived = bytesReceived
                    prevTimestamp = timestamp
                }
            }
        }
    }

    /** Releases the socket, peer connection, renderers, and EGL contexts. */
    override fun onDestroy() {
        super.onDestroy()
        statsJob?.cancel()
        socket.disconnect()
        localPeer?.dispose()
        localView?.release()
        remoteView?.release()
        localEglBase?.release()
        remoteEglBase?.release()
    }
}
目前这段代码并没有成功显示远程画面的帧率和码率,是什么原因
There are several potential reasons why the remote video's frame rate and bitrate are not being successfully displayed in your `ClientActivity4` class. Below, I outline the most common issues and provide detailed steps to help you identify and resolve the problem.
Issue:
The `frameRateState` and `bitrateState` are defined using `mutableDoubleStateOf` and `mutableLongStateOf`. If your Compose version does not provide these primitive state holders, the UI might not correctly observe and update these values.
Solution:
Use `mutableStateOf` instead of `mutableDoubleStateOf` and `mutableLongStateOf`. This ensures that Compose properly observes changes and recomposes the UI accordingly.
Implementation:
```kotlin
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.setValue

// Replace your current state definitions with the following:
private var frameRate by mutableStateOf(0.0)
private var bitrate by mutableStateOf(0L)
```
Update the state in `parseStatsReport`:
kotlinprivate fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // Convert to seconds if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // in bits per second // Update the state on the main thread lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // Update previous values prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } }
Issue:
The `parseStatsReport` function might not be correctly identifying or accessing the required stats. This could be due to mismatched stat types or missing fields.
Solution:
Add comprehensive logging within `parseStatsReport` to verify that the expected stats are being received and correctly parsed.
Implementation:
kotlinprivate fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { Log.d(TAG, "Stats Type: ${stats.type}, ID: ${stats.id}, Timestamp: ${stats.timestampUs}") if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String Log.d(TAG, "Inbound RTP Kind: $kind") if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // Convert to seconds Log.d(TAG, "Frames Decoded: $framesDecoded, Bytes Received: $bytesReceived, Timestamp: $timestamp") if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // in bits per second Log.d(TAG, "Calculated Frame Rate: $currentFrameRate fps, Bitrate: $currentBitrate bps") // Update the state on the main thread lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // Update previous values prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } }
Action Steps:
- Verify that `inbound-rtp` stats for `video` are being captured.
- Confirm that `framesDecoded` and `bytesReceived` are present and have valid values.

Issue:
Even if the state is correctly updated, if the Compose UI isn't properly observing these changes, the UI won't reflect the updated values.
Solution:
Ensure that the `Text` composable is correctly observing the state variables. With the updated `mutableStateOf`, Compose should automatically recompose when these variables change.
Implementation:
kotlin@Composable fun WebRTCComposeLayout() { // ... existing code ... // Display Frame Rate and Bitrate Text( text = "Frame Rate: ${frameRate.roundToInt()} fps\nBitrate: ${bitrate / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // ... existing code ... }
Issue:
The `startStatsCollection` coroutine might not be properly initiated or could be prematurely canceled, preventing stats from being collected.
Solution:
Ensure that `startStatsCollection` is called only after the `PeerConnection` is fully established and that the coroutine remains active as long as needed.
Implementation:
- Confirm that `startStatsCollection` is called within the `createPeerConnection` method after the `PeerConnection` is successfully created.
- Verify that the `PeerConnection` remains active and that the coroutine isn't canceled unexpectedly.

Issue:
Different versions of WebRTC might have varying structures for stats reports. Ensure compatibility with the WebRTC version you are using.
Solution:
Consult the WebRTC documentation for your specific version to verify the correct stat types and fields. Adjust your parsing logic accordingly.
Summary of fixes:
- Replace `mutableDoubleStateOf` and `mutableLongStateOf` with `mutableStateOf`.
- Add logging in `parseStatsReport` to verify that stats are being correctly received and parsed.
- Ensure the `Text` composable is correctly observing and displaying the updated state values.

By systematically addressing each of these areas, you should be able to identify why the frame rate and bitrate are not being displayed and implement the necessary fixes to resolve the issue.
给我增加打印帧率码率的代码
要在您的 `ClientActivity4` 类中增加打印远程视频帧率和码率的日志,以便更好地调试和监控这些指标,可以按照以下步骤进行操作:
1. 更新状态变量定义
2. 在 `parseStatsReport` 函数中添加日志打印
下面是详细的步骤和代码示例:
首先,确保您的状态变量使用 `mutableStateOf` 来使 Compose 能够观察到它们的变化并自动重新组合 UI。
修改前:
```kotlin
// State variables for frame rate and bitrate
private val frameRateState = mutableDoubleStateOf(0.0)
private val bitrateState = mutableLongStateOf(0L)
```
修改后:
```kotlin
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.setValue

// State variables for frame rate and bitrate
private var frameRate by mutableStateOf(0.0)
private var bitrate by mutableStateOf(0L)
```
2. 在 `parseStatsReport` 函数中添加日志打印
在解析统计报告的过程中,添加日志打印语句以输出当前的帧率和码率。
修改后的 `parseStatsReport` 函数:
kotlinprivate fun parseStatsReport(report: RTCStatsReport) { for (stats in report.statsMap.values) { if (stats.type == "inbound-rtp") { val kind = stats.members["kind"] as? String if (kind == "video") { val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0 val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0 val timestamp = stats.timestampUs / 1_000_000.0 // 转换为秒 Log.d(TAG, "Frames Decoded: $framesDecoded, Bytes Received: $bytesReceived, Timestamp: $timestamp") if (prevTimestamp != 0.0) { val timeElapsed = timestamp - prevTimestamp val framesDelta = framesDecoded - prevFramesDecoded val bytesDelta = bytesReceived - prevBytesReceived val currentFrameRate = framesDelta / timeElapsed val currentBitrate = (bytesDelta * 8) / timeElapsed // 位每秒 Log.d(TAG, "Calculated Frame Rate: $currentFrameRate fps, Bitrate: $currentBitrate bps") // 在主线程更新状态 lifecycleScope.launch(Dispatchers.Main) { frameRate = currentFrameRate bitrate = currentBitrate.toLong() } } // 更新之前的值 prevFramesDecoded = framesDecoded prevBytesReceived = bytesReceived prevTimestamp = timestamp } } } }
解释:
- 打印 `framesDecoded`、`bytesReceived` 和 `timestamp`:这些日志帮助您确认统计数据是否正确获取。
确保您的 Compose 布局正确地观察到这些状态变化并在 UI 上显示。
修改后的 `WebRTCComposeLayout` 函数:
kotlin@Composable fun WebRTCComposeLayout() { val context = LocalContext.current var peerConnectionFactory: PeerConnectionFactory? = null var localVideoTrack: VideoTrack? by remember { mutableStateOf(null) } Surface(color = Color.Black) { Box(modifier = Modifier.fillMaxSize()) { // Remote Video View AndroidView( factory = { remoteView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(false) } remoteView!! }, modifier = Modifier.fillMaxSize(), update = { if (remoteEglBase?.eglBaseContext == null) { remoteEglBase = EglBase.create() it.init(remoteEglBase!!.eglBaseContext, null) it.setMirror(false) } } ) // Local Video View AndroidView( factory = { localView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(true) } localView!! }, modifier = Modifier .size(100.dp, 150.dp) .align(Alignment.BottomEnd) .padding(16.dp), update = { if (localEglBase == null) { localEglBase = EglBase.create() it.init(localEglBase!!.eglBaseContext, null) it.setMirror(true) } } ) // 显示帧率和码率 Text( text = "帧率: ${frameRate.roundToInt()} fps\n码率: ${bitrate / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // 初始化 PeerConnectionFactory 和创建 PeerConnection LaunchedEffect(Unit) { val options = PeerConnectionFactory.InitializationOptions.builder(context) .createInitializationOptions() PeerConnectionFactory.initialize(options) val encoderFactory = DefaultVideoEncoderFactory( localEglBase!!.eglBaseContext, true, true ) val decoderFactory = DefaultVideoDecoderFactory(remoteEglBase!!.eglBaseContext) peerConnectionFactory = PeerConnectionFactory.builder() .setVideoEncoderFactory(encoderFactory) .setVideoDecoderFactory(decoderFactory) .createPeerConnectionFactory() initLocalVideo(context, localView, peerConnectionFactory!!, localEglBase!!) { localVideoTrack = it } createPeerConnection( context, peerConnectionFactory, localVideoTrack, remoteView!! ) { localPeer = it localPeerLiveData.postValue(it) } } } } }
解释:
- UI 读取 `frameRate` 和 `bitrate` 状态变量,这些变量会在 `parseStatsReport` 中更新。
- 使用 `by mutableStateOf` 确保 Compose 能观察到状态变化并自动更新 UI。
3. 验证日志输出
确保您的日志输出级别设置为 DEBUG 或更低,以便能够看到 `Log.d` 的输出。在 Android Studio 的 Logcat 中,将过滤级别设置为 DEBUG 或更低即可进行验证。示例输出:
或更低。textD/WebRTC-Client: Frames Decoded: 120.0, Bytes Received: 1500000.0, Timestamp: 162514.0 D/WebRTC-Client: Calculated Frame Rate: 30.0 fps, Bitrate: 1200000 bps
为了方便起见,以下是包括上述所有修改的完整代码片段:
kotlinimport android.Manifest import android.content.Context import android.content.pm.PackageManager import android.os.Bundle import android.util.Log import androidx.activity.ComponentActivity import androidx.activity.compose.setContent import androidx.compose.foundation.layout.* import androidx.compose.material3.Text import androidx.compose.runtime.* import androidx.compose.ui.Alignment import androidx.compose.ui.Modifier import androidx.compose.ui.graphics.Color import androidx.compose.ui.unit.dp import androidx.lifecycle.lifecycleScope import io.socket.client.IO import io.socket.client.Socket import org.json.JSONObject import org.webrtc.* import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Job import kotlinx.coroutines.delay import kotlinx.coroutines.launch import kotlin.math.roundToInt class ClientActivity4 : ComponentActivity() { private lateinit var socket: Socket private var localPeer: PeerConnection? = null private var localView: SurfaceViewRenderer? = null private var remoteView: SurfaceViewRenderer? = null private var localEglBase: EglBase? = null private var remoteEglBase: EglBase? = null private val pendingIceCandidates = mutableListOf<IceCandidate>() private var currentRoom: String? = null private val localPeerLiveData = MutableLiveData<PeerConnection?>(null) private lateinit var signalingServerUrl: String private lateinit var stunUrl: String private lateinit var turnUrl: String private lateinit var turnUsername: String private lateinit var turnPassword: String private val TAG: String = "WebRTC-Client" // State variables for frame rate and bitrate private var frameRate by mutableStateOf(0.0) private var bitrate by mutableStateOf(0L) // Variables to track previous stats for calculations private var prevFramesDecoded = 0.0 private var prevBytesReceived = 0.0 private var prevTimestamp = 0.0 // Job for stats collection coroutine private var statsJob: Job? = null override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) // 设置角色和房间 currentRoom = intent.getStringExtra("room") ?: "default-room" signalingServerUrl = intent.getStringExtra("signalingServerUrl") ?: "https://wstszx.us.kg" stunUrl = intent.getStringExtra("stunUrl") ?: "stun:stun.wstszx.us.kg:3478" turnUrl = intent.getStringExtra("turnUrl") ?: "turn:turn.wstszx.us.kg:5349?transport=udp" turnUsername = intent.getStringExtra("turnUsername") ?: "wstszx" turnPassword = intent.getStringExtra("turnPassword") ?: "930379" Log.d(TAG, "onCreate: 角色 = 客户端, 房间 = $currentRoom, 信令服务器 = $signalingServerUrl, STUN URL = $stunUrl, TURN URL = $turnUrl") initializeSocketIO() setContent { WebRTCComposeLayout() } requestPermissionsIfNeeded() } private fun initializeSocketIO() { val protocol = if (signalingServerUrl.startsWith("https")) "https" else "http" val socketUrl = signalingServerUrl val options = IO.Options().apply { transports = arrayOf("websocket") secure = protocol == "https" path = "/socket.io/" } try { socket = IO.socket(socketUrl, options) socket.on(Socket.EVENT_CONNECT) { Log.d(TAG, "Socket 已连接") socket.emit("join", currentRoom) Log.d(TAG, "已加入房间: $currentRoom") initiateCall() } socket.on(Socket.EVENT_CONNECT_ERROR) { args -> if (args.isNotEmpty()) { val error = args[0] Log.e(TAG, "Socket 连接错误: $error") } } socket.on(Socket.EVENT_DISCONNECT) { args -> if (args.isNotEmpty()) { val reason = args[0] Log.d(TAG, "Socket 已断开: $reason") } } socket.on("signal") { args -> Log.d(TAG, "收到信令: ${args[0]}") if (args.isNotEmpty() && args[0] is JSONObject) { val data = args[0] as JSONObject handleSignalingData(data) } } socket.connect() Log.d(TAG, "正在连接到 Socket: $socketUrl...") } catch (e: Exception) { Log.e(TAG, "连接 Socket 时出错: ${e.message}") } } private fun requestPermissionsIfNeeded() { if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || 
checkSelfPermission(Manifest.permission.INTERNET) != PackageManager.PERMISSION_GRANTED || checkSelfPermission(Manifest.permission.ACCESS_NETWORK_STATE) != PackageManager.PERMISSION_GRANTED ) { requestPermissions( arrayOf( Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO, Manifest.permission.INTERNET, Manifest.permission.ACCESS_NETWORK_STATE ), 1 ) } } @Composable fun WebRTCComposeLayout() { val context = LocalContext.current var peerConnectionFactory: PeerConnectionFactory? = null var localVideoTrack: VideoTrack? by remember { mutableStateOf(null) } Surface(color = Color.Black) { Box(modifier = Modifier.fillMaxSize()) { // Remote Video View AndroidView( factory = { remoteView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(false) } remoteView!! }, modifier = Modifier.fillMaxSize(), update = { if (remoteEglBase?.eglBaseContext == null) { remoteEglBase = EglBase.create() it.init(remoteEglBase!!.eglBaseContext, null) it.setMirror(false) } } ) // Local Video View AndroidView( factory = { localView = SurfaceViewRenderer(it).apply { setZOrderMediaOverlay(true) } localView!! 
}, modifier = Modifier .size(100.dp, 150.dp) .align(Alignment.BottomEnd) .padding(16.dp), update = { if (localEglBase == null) { localEglBase = EglBase.create() it.init(localEglBase!!.eglBaseContext, null) it.setMirror(true) } } ) // 显示帧率和码率 Text( text = "帧率: ${frameRate.roundToInt()} fps\n码率: ${bitrate / 1000} kbps", color = Color.White, modifier = Modifier .align(Alignment.TopStart) .padding(16.dp) ) // 初始化 PeerConnectionFactory 和创建 PeerConnection LaunchedEffect(Unit) { val options = PeerConnectionFactory.InitializationOptions.builder(context) .createInitializationOptions() PeerConnectionFactory.initialize(options) val encoderFactory = DefaultVideoEncoderFactory( localEglBase!!.eglBaseContext, true, true ) val decoderFactory = DefaultVideoDecoderFactory(remoteEglBase!!.eglBaseContext) peerConnectionFactory = PeerConnectionFactory.builder() .setVideoEncoderFactory(encoderFactory) .setVideoDecoderFactory(decoderFactory) .createPeerConnectionFactory() initLocalVideo(context, localView, peerConnectionFactory!!, localEglBase!!) { localVideoTrack = it } createPeerConnection( context, peerConnectionFactory, localVideoTrack, remoteView!! 
) {
    localPeer = it
    localPeerLiveData.postValue(it)
}
            } // closes LaunchedEffect (opened earlier in the file)
        } // closes the Box content lambda
    } // closes WebRTCComposeLayout()

    /**
     * Captures the local camera and microphone, renders the camera preview into
     * [localView], and hands the created video track back through [onLocalVideoTrack].
     *
     * NOTE(review): the MediaStream built here ("local_stream") is never attached to a
     * PeerConnection — createPeerConnection() adds its own tracks via addTrack(). The
     * audio track created here is therefore redundant; confirm before removing.
     */
    private fun initLocalVideo(
        context: Context,
        localView: SurfaceViewRenderer?,
        peerConnectionFactory: PeerConnectionFactory,
        eglBase: EglBase,
        onLocalVideoTrack: (VideoTrack) -> Unit
    ) {
        val videoCapturer = createCameraCapturer(context)
        val surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBase.eglBaseContext)
        val videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast)
        videoCapturer.initialize(surfaceTextureHelper, context, videoSource.capturerObserver)
        // 1080p@60 is a best-effort request; the camera falls back to the closest supported mode.
        videoCapturer.startCapture(1920, 1080, 60)
        val localVideoTrack = peerConnectionFactory.createVideoTrack("video_track", videoSource)
        localVideoTrack.addSink(localView)
        val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints())
        val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource)
        // Add the audio/video tracks to the local stream
        val mediaStream = peerConnectionFactory.createLocalMediaStream("local_stream")
        mediaStream.addTrack(localAudioTrack)
        mediaStream.addTrack(localVideoTrack)
        onLocalVideoTrack(localVideoTrack)
    }

    /**
     * Returns a camera capturer, preferring a front-facing camera and otherwise
     * falling back to the first enumerated device.
     *
     * @throws IllegalStateException if no capturer can be created.
     */
    private fun createCameraCapturer(context: Context): CameraVideoCapturer {
        val camera2Enumerator = Camera2Enumerator(context)
        val deviceNames = camera2Enumerator.deviceNames
        for (deviceName in deviceNames) {
            if (camera2Enumerator.isFrontFacing(deviceName)) {
                return camera2Enumerator.createCapturer(deviceName, null)
                    ?: throw IllegalStateException("无法创建前置摄像头捕获器")
            }
        }
        // No front camera found: use the first available camera instead
        return camera2Enumerator.createCapturer(deviceNames[0], null)
            ?: throw IllegalStateException("无法创建摄像头捕获器")
    }

    /**
     * Creates the PeerConnection (Unified Plan, bundled, rtcp-mux) against the configured
     * STUN/TURN servers, wires remote video tracks into [remoteView], attaches the local
     * audio/video tracks, reports the peer via [onLocalPeerCreated], and starts the
     * per-second stats collection loop.
     *
     * Outgoing ICE candidates are relayed to the signaling server as "ice" messages
     * scoped to [currentRoom].
     */
    private fun createPeerConnection(
        context: Context,
        peerConnectionFactory: PeerConnectionFactory,
        localVideoTrack: VideoTrack?,
        remoteView: SurfaceViewRenderer,
        onLocalPeerCreated: (PeerConnection) -> Unit
    ) {
        val iceServers = listOf(
            PeerConnection.IceServer.builder(stunUrl).createIceServer(),
            PeerConnection.IceServer.builder(turnUrl)
                .setUsername(turnUsername)
                .setPassword(turnPassword)
                .createIceServer()
        )
        val rtcConfig = PeerConnection.RTCConfiguration(iceServers).apply {
            bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE
            rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE
            tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED
            continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY
            sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN
        }
        val localPeer = peerConnectionFactory.createPeerConnection(rtcConfig, object : PeerConnection.Observer {
            override fun onIceCandidate(iceCandidate: IceCandidate?) {
                iceCandidate?.let {
                    Log.d(TAG, "ICE candidate: $it")
                    // Forward the candidate to the remote peer through the signaling server
                    val signalData = JSONObject().apply {
                        put("type", "ice")
                        put("candidate", JSONObject().apply {
                            put("sdpMid", it.sdpMid)
                            put("sdpMLineIndex", it.sdpMLineIndex)
                            put("candidate", it.sdp)
                        })
                        put("room", currentRoom)
                    }
                    socket.emit("signal", signalData)
                }
            }

            override fun onIceCandidatesRemoved(candidates: Array<out IceCandidate>?) {
                Log.d(TAG, "ICE candidates removed")
            }

            override fun onSignalingChange(newState: PeerConnection.SignalingState?) {
                Log.d(TAG, "Signaling state changed to: $newState")
            }

            override fun onIceConnectionChange(newState: PeerConnection.IceConnectionState?) {
                Log.d(TAG, "ICE connection state changed to: $newState")
            }

            override fun onIceConnectionReceivingChange(receiving: Boolean) {
                Log.d(TAG, "ICE connection receiving change: $receiving")
            }

            override fun onIceGatheringChange(newState: PeerConnection.IceGatheringState?) {
                Log.d(TAG, "ICE gathering state changed to: $newState")
            }

            override fun onAddStream(stream: MediaStream?) {
                Log.d(TAG, "Stream added")
            }

            override fun onRemoveStream(stream: MediaStream?) {
                Log.d(TAG, "Stream removed")
            }

            override fun onDataChannel(dataChannel: DataChannel?) {
                Log.d(TAG, "Data channel created")
            }

            override fun onRenegotiationNeeded() {
                Log.d(TAG, "Renegotiation needed")
            }

            override fun onAddTrack(receiver: RtpReceiver?, streams: Array<out MediaStream>?) {
                Log.d(TAG, "Track added")
                receiver?.track()?.let { track ->
                    if (track is VideoTrack) {
                        track.addSink(remoteView)
                    }
                }
            }

            override fun onTrack(transceiver: RtpTransceiver?) {
                Log.d(TAG, "onTrack called")
                transceiver?.receiver?.track()?.let { track ->
                    if (track is VideoTrack) {
                        track.addSink(remoteView)
                    }
                }
            }

            override fun onConnectionChange(newState: PeerConnection.PeerConnectionState?) {
                Log.d(TAG, "Connection state changed to: $newState")
            }
        })
        localVideoTrack?.let {
            localPeer?.addTrack(it, listOf("local_stream"))
        }
        // NOTE(review): a second audio source/track is created here even though
        // initLocalVideo() already created one — confirm the duplicate is intentional.
        val audioSource = peerConnectionFactory.createAudioSource(MediaConstraints())
        val localAudioTrack = peerConnectionFactory.createAudioTrack("audio_track", audioSource)
        localPeer?.addTrack(localAudioTrack, listOf("local_stream"))
        onLocalPeerCreated(localPeer!!)
        // Start collecting stats
        startStatsCollection()
    }

    /**
     * Creates an SDP offer, sets it as the local description, and sends it to the
     * signaling server as an "offer" message for [currentRoom].
     *
     * NOTE(review): this is called from the socket "connect" handler, which may fire
     * before the LaunchedEffect has created [localPeer]; in that case `localPeer?.`
     * silently skips the offer — verify the intended call ordering.
     */
    private fun initiateCall() {
        Log.d(TAG, "Initiating call...")
        val constraints = MediaConstraints().apply {
            mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"))
            mandatory.add(MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"))
        }
        localPeer?.createOffer(object : SdpObserver {
            override fun onCreateSuccess(sessionDescription: SessionDescription?) {
                sessionDescription?.let { sdp ->
                    localPeer?.setLocalDescription(object : SdpObserver {
                        override fun onSetSuccess() {
                            // Local description applied: relay the offer to the room
                            val signalData = JSONObject().apply {
                                put("type", "offer")
                                put("sdp", JSONObject().put("sdp", sdp.description))
                                put("room", currentRoom)
                            }
                            socket.emit("signal", signalData)
                        }

                        override fun onSetFailure(error: String?) {
                            Log.e(TAG, "Set local description error: $error")
                        }

                        override fun onCreateSuccess(p0: SessionDescription?) {}
                        override fun onCreateFailure(p0: String?) {}
                    }, sdp)
                }
            }

            override fun onSetSuccess() {}

            override fun onCreateFailure(error: String?) {
                Log.e(TAG, "Create offer error: $error")
            }

            override fun onSetFailure(error: String?) {}
        }, constraints)
    }

    /**
     * Dispatches an incoming signaling message:
     * - "answer": applies the remote description, then flushes any ICE candidates that
     *   arrived early ([pendingIceCandidates]).
     * - "ice": adds the candidate immediately if the remote description is set,
     *   otherwise queues it.
     * Unknown types are logged as errors.
     */
    private fun handleSignalingData(data: JSONObject) {
        Log.d(TAG, "Handling signaling data: $data")
        when (data.getString("type")) {
            "answer" -> {
                Log.d(TAG, "Received answer")
                val sdp = SessionDescription(
                    SessionDescription.Type.ANSWER,
                    data.getJSONObject("sdp").getString("sdp")
                )
                localPeer?.setRemoteDescription(object : SdpObserver {
                    override fun onSetSuccess() {
                        // Remote description is set: candidates queued earlier can now be added
                        pendingIceCandidates.forEach { candidate ->
                            localPeer?.addIceCandidate(candidate)
                        }
                        pendingIceCandidates.clear()
                        Log.d(TAG, "Set remote description (answer) success")
                    }

                    override fun onSetFailure(error: String?) {
                        Log.e(TAG, "Set remote description error: $error")
                    }

                    override fun onCreateSuccess(p0: SessionDescription?) {}
                    override fun onCreateFailure(p0: String?) {}
                }, sdp)
            }
            "ice" -> {
                Log.d(TAG, "Received ICE candidate")
                val candidateData = data.getJSONObject("candidate")
                val candidate = IceCandidate(
                    candidateData.getString("sdpMid"),
                    candidateData.getInt("sdpMLineIndex"),
                    candidateData.getString("candidate")
                )
                if (localPeer?.remoteDescription != null) {
                    localPeer?.addIceCandidate(candidate)
                } else {
                    // Answer not applied yet: queue until setRemoteDescription succeeds
                    pendingIceCandidates.add(candidate)
                }
            }
            else -> {
                Log.e(TAG, "Unknown signaling type: ${data.getString("type")}")
            }
        }
    }

    /**
     * Launches a background coroutine that polls [PeerConnection.getStats] once per
     * second for as long as the job is active. The job handle is kept in [statsJob]
     * so onDestroy() can cancel it.
     */
    private fun startStatsCollection() {
        statsJob = lifecycleScope.launch(Dispatchers.Default) {
            while (isActive) {
                delay(1000) // Collect statistics once per second
                localPeer?.getStats { report ->
                    parseStatsReport(report)
                }
            }
        }
    }

    /**
     * Extracts the inbound video RTP stats from [report] and derives the current
     * frame rate (fps) and bitrate (bps) from the deltas against the previous sample,
     * publishing the results to [frameRateState]/[bitrateState] on the main thread.
     * The first sample only seeds the previous-value trackers.
     */
    private fun parseStatsReport(report: RTCStatsReport) {
        for (stats in report.statsMap.values) {
            if (stats.type == "inbound-rtp") {
                val kind = stats.members["kind"] as? String
                if (kind == "video") {
                    val framesDecoded = stats.members["framesDecoded"] as? Double ?: 0.0
                    val bytesReceived = stats.members["bytesReceived"] as? Double ?: 0.0
                    val timestamp = stats.timestampUs / 1_000_000.0 // Convert microseconds to seconds
                    Log.d(TAG, "Frames Decoded: $framesDecoded, Bytes Received: $bytesReceived, Timestamp: $timestamp")
                    if (prevTimestamp != 0.0) {
                        val timeElapsed = timestamp - prevTimestamp
                        val framesDelta = framesDecoded - prevFramesDecoded
                        val bytesDelta = bytesReceived - prevBytesReceived
                        val currentFrameRate = framesDelta / timeElapsed
                        val currentBitrate = (bytesDelta * 8) / timeElapsed // Bits per second
                        Log.d(TAG, "Calculated Frame Rate: $currentFrameRate fps, Bitrate: $currentBitrate bps")
                        // Update Compose state on the main thread.
                        // FIX: the class declares frameRateState/bitrateState (see top of
                        // class); the bare names frameRate/bitrate do not resolve here.
                        lifecycleScope.launch(Dispatchers.Main) {
                            frameRateState.doubleValue = currentFrameRate
                            bitrateState.longValue = currentBitrate.toLong()
                        }
                    }
                    // Remember this sample for the next delta computation
                    prevFramesDecoded = framesDecoded
                    prevBytesReceived = bytesReceived
                    prevTimestamp = timestamp
                }
            }
        }
    }

    /** Stops stats polling and releases networking, WebRTC, and rendering resources. */
    override fun onDestroy() {
        super.onDestroy()
        statsJob?.cancel()
        socket.disconnect()
        localPeer?.dispose()
        localView?.release()
        remoteView?.release()
        localEglBase?.release()
        remoteEglBase?.release()
    }
}
WebRTC-Client
示例日志输出:

```text
D/WebRTC-Client: Frames Decoded: 120.0, Bytes Received: 1500000.0, Timestamp: 162514.0
D/WebRTC-Client: Calculated Frame Rate: 30.0 fps, Bitrate: 1200000 bps
```
通过以上步骤,您应该能够成功地打印和显示远程视频的帧率和码率,从而更好地监控 WebRTC 连接的性能。如果在实施过程中遇到任何问题,请随时向我寻求进一步的帮助!