davidliu
Committed by GitHub

Fix deadlock caused by multiple concurrent setCameraEnabled calls (#472)

* Fix deadlock caused by multiple concurrent setCameraEnabled calls

* tests

* spotless
... ... @@ -19,13 +19,13 @@ package io.livekit.android.memory
import livekit.org.webrtc.SurfaceTextureHelper
import java.io.Closeable
/**
 * A [Closeable] wrapper that releases a [SurfaceTextureHelper] exactly once.
 *
 * Calling [close] more than once is safe: only the first call stops listening
 * and disposes the helper. A null helper is tolerated so callers don't need a
 * separate code path when no surface texture was ever created.
 */
internal class SurfaceTextureHelperCloser(private val surfaceTextureHelper: SurfaceTextureHelper?) : Closeable {

    // Guards against double-dispose; webrtc native objects must not be disposed twice.
    private var isClosed = false

    override fun close() {
        if (!isClosed) {
            isClosed = true
            surfaceTextureHelper?.stopListening()
            surfaceTextureHelper?.dispose()
        }
    }
}
... ...
... ... @@ -52,6 +52,8 @@ import io.livekit.android.webrtc.sortVideoCodecPreferences
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import livekit.LivekitModels
import livekit.LivekitRtc
import livekit.LivekitRtc.AddTrackRequest
... ... @@ -99,6 +101,12 @@ internal constructor(
private val jobs = mutableMapOf<Any, Job>()
// Ensures that only one caller can execute setTrackEnabled at a time per source.
// Without this, concurrent calls could create multiple tracks for the same source,
// and the camera can deadlock when multiple CameraCapturers try to activate/stop concurrently.
private val sourcePubLocks = Track.Source.values()
.associate { source -> source to Mutex() }
/**
* Creates an audio track, recording audio through the microphone with the given [options].
*
... ... @@ -228,57 +236,60 @@ internal constructor(
enabled: Boolean,
mediaProjectionPermissionResultData: Intent? = null,
) {
val pub = getTrackPublication(source)
if (enabled) {
if (pub != null) {
pub.muted = false
if (source == Track.Source.CAMERA && pub.track is LocalVideoTrack) {
(pub.track as? LocalVideoTrack)?.startCapture()
}
} else {
when (source) {
Track.Source.CAMERA -> {
val track = createVideoTrack()
track.startCapture()
publishVideoTrack(track)
val pubLock = sourcePubLocks[source]!!
pubLock.withLock {
val pub = getTrackPublication(source)
if (enabled) {
if (pub != null) {
pub.muted = false
if (source == Track.Source.CAMERA && pub.track is LocalVideoTrack) {
(pub.track as? LocalVideoTrack)?.startCapture()
}
} else {
when (source) {
Track.Source.CAMERA -> {
val track = createVideoTrack()
track.startCapture()
publishVideoTrack(track)
}
Track.Source.MICROPHONE -> {
val track = createAudioTrack()
publishAudioTrack(track)
}
Track.Source.MICROPHONE -> {
val track = createAudioTrack()
publishAudioTrack(track)
}
Track.Source.SCREEN_SHARE -> {
if (mediaProjectionPermissionResultData == null) {
throw IllegalArgumentException("Media Projection permission result data is required to create a screen share track.")
Track.Source.SCREEN_SHARE -> {
if (mediaProjectionPermissionResultData == null) {
throw IllegalArgumentException("Media Projection permission result data is required to create a screen share track.")
}
val track =
createScreencastTrack(mediaProjectionPermissionResultData = mediaProjectionPermissionResultData)
track.startForegroundService(null, null)
track.startCapture()
publishVideoTrack(track)
}
val track =
createScreencastTrack(mediaProjectionPermissionResultData = mediaProjectionPermissionResultData)
track.startForegroundService(null, null)
track.startCapture()
publishVideoTrack(track)
}
else -> {
LKLog.w { "Attempting to enable an unknown source, ignoring." }
else -> {
LKLog.w { "Attempting to enable an unknown source, ignoring." }
}
}
}
}
} else {
pub?.track?.let { track ->
// screenshare cannot be muted, unpublish instead
if (pub.source == Track.Source.SCREEN_SHARE) {
unpublishTrack(track)
} else {
pub.muted = true
// Release camera session so other apps can use.
if (pub.source == Track.Source.CAMERA && track is LocalVideoTrack) {
track.stopCapture()
} else {
pub?.track?.let { track ->
// screenshare cannot be muted, unpublish instead
if (pub.source == Track.Source.SCREEN_SHARE) {
unpublishTrack(track)
} else {
pub.muted = true
// Release camera session so other apps can use.
if (pub.source == Track.Source.CAMERA && track is LocalVideoTrack) {
track.stopCapture()
}
}
}
}
return@withLock
}
}
... ... @@ -484,6 +495,7 @@ internal constructor(
options = options,
)
addTrackPublication(publication)
LKLog.e { "add track publication $publication" }
publishListener?.onPublishSuccess(publication)
internalListener?.onTrackPublished(publication, this)
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.test.mock
import livekit.org.webrtc.AudioSource
/**
 * A stub [AudioSource] for tests.
 *
 * Passes an arbitrary value (100L) to the superclass constructor — presumably in
 * place of a real native source pointer, so no native audio machinery is needed.
 */
class MockAudioSource : AudioSource(100L)
... ...
... ... @@ -17,7 +17,6 @@
package io.livekit.android.test.mock
import com.google.protobuf.MessageLite
import io.livekit.android.test.util.toOkioByteString
import io.livekit.android.test.util.toPBByteString
import io.livekit.android.util.toOkioByteString
import livekit.LivekitModels
... ... @@ -56,14 +55,21 @@ class MockWebSocketFactory : WebSocket.Factory {
return ws
}
// The default handler, exposed as a named reference so tests can remove default
// behavior via unregisterSignalRequestHandler(defaultSignalRequestHandler).
val defaultSignalRequestHandler: SignalRequestHandler = { signalRequest -> defaultHandleSignalRequest(signalRequest) }

// Handlers are consulted in list order; newly registered handlers are inserted
// at the front so they take priority over the default.
private val signalRequestHandlers = mutableListOf(defaultSignalRequestHandler)
/**
 * Registers [handler] at the front of the handler list, so it is
 * consulted before any previously registered handler (including the default).
 */
fun registerSignalRequestHandler(handler: SignalRequestHandler) {
    signalRequestHandlers.add(index = 0, element = handler)
}
/**
 * Removes a previously registered handler.
 *
 * Pass [defaultSignalRequestHandler] to disable the default request handling entirely.
 */
fun unregisterSignalRequestHandler(handler: SignalRequestHandler) {
    signalRequestHandlers.remove(handler)
}
private fun handleSignalRequest(signalRequest: SignalRequest) {
for (handler in signalRequestHandlers) {
if (handler.invoke(signalRequest)) {
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.test.mock.camera
import android.content.Context
import io.livekit.android.room.track.LocalVideoTrackOptions
import io.livekit.android.room.track.video.CameraCapturerUtils
import io.livekit.android.room.track.video.CameraEventsDispatchHandler
import livekit.org.webrtc.CameraEnumerationAndroid
import livekit.org.webrtc.CameraEnumerator
import livekit.org.webrtc.CameraVideoCapturer
import livekit.org.webrtc.CapturerObserver
import livekit.org.webrtc.SurfaceTextureHelper
import livekit.org.webrtc.VideoCapturer
/**
 * A [CameraCapturerUtils.CameraProvider] backed by mock camera implementations,
 * for tests that run without real camera hardware.
 */
class MockCameraProvider : CameraCapturerUtils.CameraProvider {

    private val enumerator by lazy { MockCameraEnumerator() }

    // 100 — presumably high so this provider is preferred over real ones; confirm
    // against CameraCapturerUtils' provider-selection logic.
    override val cameraVersion: Int = 100

    override fun provideEnumerator(context: Context): CameraEnumerator = enumerator

    override fun provideCapturer(context: Context, options: LocalVideoTrackOptions, eventsHandler: CameraEventsDispatchHandler): VideoCapturer =
        enumerator.createCapturer(options.deviceId, eventsHandler)

    override fun isSupported(context: Context): Boolean = true

    companion object {
        /** Registers a fresh instance with [CameraCapturerUtils]. */
        fun register() {
            CameraCapturerUtils.registerCameraProvider(MockCameraProvider())
        }
    }
}
/**
 * A [CameraEnumerator] that reports a single front-facing device named "camera"
 * with one supported capture format, and hands out [MockCameraVideoCapturer]s.
 */
class MockCameraEnumerator : CameraEnumerator {

    override fun getDeviceNames(): Array<String> = arrayOf("camera")

    override fun isFrontFacing(deviceName: String): Boolean = true

    override fun isBackFacing(deviceName: String): Boolean = false

    override fun getSupportedFormats(p0: String): MutableList<CameraEnumerationAndroid.CaptureFormat> =
        mutableListOf(CameraEnumerationAndroid.CaptureFormat(480, 640, 30, 30))

    override fun createCapturer(deviceName: String?, eventsHandler: CameraVideoCapturer.CameraEventsHandler): CameraVideoCapturer =
        MockCameraVideoCapturer()
}
/**
 * A no-op [CameraVideoCapturer]: every operation does nothing, and it never
 * reports itself as a screencast.
 */
class MockCameraVideoCapturer : CameraVideoCapturer {

    override fun initialize(p0: SurfaceTextureHelper?, p1: Context?, p2: CapturerObserver?) = Unit

    override fun startCapture(p0: Int, p1: Int, p2: Int) = Unit

    override fun stopCapture() = Unit

    override fun changeCaptureFormat(p0: Int, p1: Int, p2: Int) = Unit

    override fun dispose() = Unit

    override fun isScreencast(): Boolean = false

    override fun switchCamera(p0: CameraVideoCapturer.CameraSwitchHandler?) = Unit

    override fun switchCamera(p0: CameraVideoCapturer.CameraSwitchHandler?, p1: String?) = Unit
}
... ...
... ... @@ -17,8 +17,13 @@
package io.livekit.android.test.util
import com.google.protobuf.ByteString
import livekit.LivekitRtc
import okio.ByteString.Companion.toByteString
/** Converts a protobuf [com.google.protobuf.ByteString] into an okio [okio.ByteString] (copies the bytes). */
fun com.google.protobuf.ByteString.toOkioByteString() = toByteArray().toByteString()
/** Converts an okio [okio.ByteString] into a protobuf [ByteString] (copies the bytes). */
fun okio.ByteString.toPBByteString() = ByteString.copyFrom(toByteArray())
/**
 * Parses this okio [okio.ByteString] as a serialized [LivekitRtc.SignalRequest]
 * protobuf message.
 */
fun okio.ByteString.toSignalRequest(): LivekitRtc.SignalRequest {
    val protoBytes = toPBByteString()
    return LivekitRtc.SignalRequest.newBuilder()
        .mergeFrom(protoBytes)
        .build()
}
... ...
... ... @@ -16,6 +16,8 @@
package livekit.org.webrtc
import io.livekit.android.test.mock.MockAudioSource
import io.livekit.android.test.mock.MockAudioStreamTrack
import io.livekit.android.test.mock.MockPeerConnection
import io.livekit.android.test.mock.MockVideoSource
import io.livekit.android.test.mock.MockVideoStreamTrack
... ... @@ -30,6 +32,14 @@ class MockPeerConnectionFactory : PeerConnectionFactory(1L) {
return MockPeerConnection(rtcConfig, observer)
}
// Hands back a mock source; the constraints argument is ignored.
override fun createAudioSource(constraints: MediaConstraints?): AudioSource = MockAudioSource()
// Hands back a mock track carrying the requested id; the source argument is ignored.
override fun createAudioTrack(id: String, source: AudioSource?): AudioTrack = MockAudioStreamTrack(id = id)
// Hands back a mock source; both flags are ignored.
override fun createVideoSource(isScreencast: Boolean, alignTimestamps: Boolean): VideoSource = MockVideoSource()
... ...
... ... @@ -16,6 +16,10 @@
package io.livekit.android.room.participant
import android.Manifest
import android.app.Application
import android.content.Context
import androidx.test.core.app.ApplicationProvider
import io.livekit.android.audio.AudioProcessorInterface
import io.livekit.android.events.ParticipantEvent
import io.livekit.android.events.RoomEvent
... ... @@ -36,9 +40,14 @@ import io.livekit.android.test.mock.MockEglBase
import io.livekit.android.test.mock.MockVideoCapturer
import io.livekit.android.test.mock.MockVideoStreamTrack
import io.livekit.android.test.mock.TestData
import io.livekit.android.test.mock.camera.MockCameraProvider
import io.livekit.android.test.util.toPBByteString
import io.livekit.android.util.toOkioByteString
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.Job
import kotlinx.coroutines.cancel
import kotlinx.coroutines.launch
import kotlinx.coroutines.test.advanceUntilIdle
import livekit.LivekitModels
import livekit.LivekitModels.AudioTrackFeature
... ... @@ -57,6 +66,7 @@ import org.mockito.Mockito
import org.mockito.Mockito.mock
import org.mockito.kotlin.argThat
import org.robolectric.RobolectricTestRunner
import org.robolectric.Shadows
import java.nio.ByteBuffer
@ExperimentalCoroutinesApi
... ... @@ -111,6 +121,49 @@ class LocalParticipantMockE2ETest : MockE2ETest() {
}
@Test
fun setTrackEnabledIsSynchronizedSingleSource() = runTest {
    connect()

    val context = ApplicationProvider.getApplicationContext<Context>()
    val shadowApplication = Shadows.shadowOf(context as Application)
    // Microphone track creation requires RECORD_AUDIO.
    shadowApplication.grantPermissions(Manifest.permission.RECORD_AUDIO)

    // Remove the default signal handling — presumably so publish requests go
    // unanswered and the first enable call stays in flight while the second one
    // arrives; confirm against defaultHandleSignalRequest.
    wsFactory.unregisterSignalRequestHandler(wsFactory.defaultSignalRequestHandler)
    wsFactory.ws.clearRequests()

    val backgroundScope = CoroutineScope(coroutineContext + Job())
    try {
        // Two concurrent enable calls for the SAME source (microphone).
        backgroundScope.launch { room.localParticipant.setMicrophoneEnabled(true) }
        backgroundScope.launch { room.localParticipant.setMicrophoneEnabled(true) }

        // The per-source lock must serialize the calls so only one publish
        // request is ever sent.
        // NOTE(review): no advanceUntilIdle before this assert — relies on the
        // launched coroutines having run already; confirm the test dispatcher
        // used by runTest here is eager/unconfined.
        assertEquals(1, wsFactory.ws.sentRequests.size)
    } finally {
        backgroundScope.cancel()
    }
}
@Test
fun setTrackEnabledIsSynchronizedMultipleSource() = runTest {
    connect()
    // Install the mock camera so camera track creation works without hardware.
    MockCameraProvider.register()

    val context = ApplicationProvider.getApplicationContext<Context>()
    val shadowApplication = Shadows.shadowOf(context as Application)
    // Mic and camera tracks need their respective permissions.
    shadowApplication.grantPermissions(Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA)

    // Remove the default signal handling — presumably so publish requests go
    // unanswered and both enable calls stay in flight; confirm against
    // defaultHandleSignalRequest.
    wsFactory.unregisterSignalRequestHandler(wsFactory.defaultSignalRequestHandler)
    wsFactory.ws.clearRequests()

    val backgroundScope = CoroutineScope(coroutineContext + Job())
    try {
        // Concurrent enable calls for two DIFFERENT sources.
        backgroundScope.launch { room.localParticipant.setMicrophoneEnabled(true) }
        backgroundScope.launch { room.localParticipant.setCameraEnabled(true) }

        // Locks are per-source, so distinct sources must not block each other:
        // both publish requests should be sent.
        // NOTE(review): no advanceUntilIdle before this assert — relies on the
        // launched coroutines having run already; confirm the test dispatcher
        // used by runTest here is eager/unconfined.
        assertEquals(2, wsFactory.ws.sentRequests.size)
    } finally {
        backgroundScope.cancel()
    }
}
@Test
fun publishVideoTrackRequest() = runTest {
connect()
wsFactory.ws.clearRequests()
... ...