FlowObservable isTrackEnabled variables (#685)
Committed by GitHub
* Change isMicrophoneEnabled, isCameraEnabled, isScreenshareEnabled to FlowObservable variables
* Compile fixes
* spotless
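In consumer terms, the first change above means the enabled checks are no longer functions: a call such as `participant.isCameraEnabled()` becomes a plain property read, and the same state can also be observed reactively. A minimal sketch of the before/after call sites, assuming a `Participant` obtained from a connected `Room` and some `CoroutineScope` named `scope` (illustrative only, not code from this diff):

    // Before: function call returning a one-off snapshot.
    val wasCameraOn: Boolean = participant.isCameraEnabled()

    // After: property read backed by an eagerly started StateFlow...
    val isCameraOn: Boolean = participant.isCameraEnabled

    // ...or reactive collection through the @FlowObservable property reference.
    scope.launch {
        participant::isCameraEnabled.flow.collect { enabled ->
            // React to camera enable/disable here.
        }
    }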
Showing 9 changed files with 174 additions and 104 deletions.
.changeset/strange-waves-tap.md (new file, mode 100644)
@@ -62,6 +62,7 @@ import kotlinx.coroutines.Job
 import kotlinx.coroutines.async
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.delay
+import kotlinx.coroutines.flow.map
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.suspendCancellableCoroutine
 import kotlinx.coroutines.sync.Mutex
@@ -32,6 +32,7 @@ import io.livekit.android.util.flow
 import io.livekit.android.util.flowDelegate
 import kotlinx.coroutines.CoroutineDispatcher
 import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.SupervisorJob
 import kotlinx.coroutines.cancel
 import kotlinx.coroutines.flow.Flow
@@ -270,6 +271,7 @@ open class Participant(
     var trackPublications by flowDelegate(emptyMap<String, TrackPublication>())
         protected set

+    @OptIn(ExperimentalCoroutinesApi::class)
     private fun Flow<Map<String, TrackPublication>>.trackUpdateFlow(): Flow<List<Pair<TrackPublication, Track?>>> {
         return flatMapLatest { videoTracks ->
             if (videoTracks.isEmpty()) {
@@ -365,23 +367,43 @@ open class Participant(
         return null
     }

-    fun isCameraEnabled(): Boolean {
-        val pub = getTrackPublication(Track.Source.CAMERA)
-        return isTrackPublicationEnabled(pub)
-    }
+    @FlowObservable
+    @get:FlowObservable
+    val isMicrophoneEnabled by flowDelegate(
+        stateFlow = ::audioTrackPublications.flow
+            .map { it.firstOrNull { (pub, _) -> pub.source == Track.Source.MICROPHONE } ?: (null to null) }
+            .isTrackEnabledDetector()
+            .stateIn(delegateScope, SharingStarted.Eagerly, false),
+    )

-    fun isMicrophoneEnabled(): Boolean {
-        val pub = getTrackPublication(Track.Source.MICROPHONE)
-        return isTrackPublicationEnabled(pub)
-    }
+    @FlowObservable
+    @get:FlowObservable
+    val isCameraEnabled by flowDelegate(
+        stateFlow = ::videoTrackPublications.flow
+            .map { it.firstOrNull { (pub, _) -> pub.source == Track.Source.CAMERA } ?: (null to null) }
+            .isTrackEnabledDetector()
+            .stateIn(delegateScope, SharingStarted.Eagerly, false),
+    )

-    fun isScreenShareEnabled(): Boolean {
-        val pub = getTrackPublication(Track.Source.SCREEN_SHARE)
-        return isTrackPublicationEnabled(pub)
-    }
+    @FlowObservable
+    @get:FlowObservable
+    val isScreenShareEnabled by flowDelegate(
+        stateFlow = ::videoTrackPublications.flow
+            .map { it.firstOrNull { (pub, _) -> pub.source == Track.Source.SCREEN_SHARE } ?: (null to null) }
+            .isTrackEnabledDetector()
+            .stateIn(delegateScope, SharingStarted.Eagerly, false),
+    )

-    private fun isTrackPublicationEnabled(pub: TrackPublication?): Boolean {
-        return !(pub?.muted ?: true)
+    @OptIn(ExperimentalCoroutinesApi::class)
+    private fun Flow<Pair<TrackPublication?, Track?>>.isTrackEnabledDetector(): Flow<Boolean> {
+        return this.flatMapLatest { (pub, track) ->
+            if (pub == null) {
+                flowOf(false to track)
+            } else {
+                pub::muted.flow
+                    .map { muted -> muted to track }
+            }
+        }.map { (muted, track) -> (!muted && track != null) }
     }

     /**
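The `isTrackEnabledDetector` above makes "enabled" a three-part condition: a publication for the source exists, it is not muted, and a track is actually attached. A non-reactive restatement of the rule for a single snapshot (an illustrative equivalent, not SDK code):

    // Illustrative equivalent of what the detector computes for one (publication, track) pair.
    fun isEnabledSnapshot(pub: TrackPublication?, track: Track?): Boolean {
        if (pub == null) return false          // no publication for this source -> disabled
        return !pub.muted && track != null     // enabled only when unmuted and a track is attached
    }

The reactive version re-evaluates this whenever the publication list or the publication's `muted` flag changes, and `stateIn(delegateScope, SharingStarted.Eagerly, false)` keeps the latest value available for synchronous property reads.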
@@ -32,6 +32,7 @@ import io.livekit.android.room.track.VideoCaptureParameter
 import io.livekit.android.room.track.VideoCodec
 import io.livekit.android.test.MockE2ETest
 import io.livekit.android.test.assert.assertIsClassList
+import io.livekit.android.test.coroutines.toListUntilSignal
 import io.livekit.android.test.events.EventCollector
 import io.livekit.android.test.mock.MockAudioProcessingController
 import io.livekit.android.test.mock.MockEglBase
@@ -41,11 +42,14 @@ import io.livekit.android.test.mock.TestData
 import io.livekit.android.test.mock.camera.MockCameraProvider
 import io.livekit.android.test.mock.room.track.createMockLocalAudioTrack
 import io.livekit.android.test.util.toPBByteString
+import io.livekit.android.util.flow
 import io.livekit.android.util.toOkioByteString
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.Job
+import kotlinx.coroutines.async
 import kotlinx.coroutines.cancel
+import kotlinx.coroutines.flow.MutableStateFlow
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.test.StandardTestDispatcher
 import kotlinx.coroutines.test.advanceUntilIdle
@@ -608,4 +612,39 @@ class LocalParticipantMockE2ETest : MockE2ETest() {
         coroutineRule.dispatcher.scheduler.advanceUntilIdle()
         assertTrue(!didThrow && success == false)
     }
+
+    @Test
+    fun isMicrophoneEnabled() = runTest {
+        connect()
+
+        room.localParticipant.publishAudioTrack(
+            track = createMockLocalAudioTrack(),
+        )
+
+        advanceUntilIdle()
+
+        assertTrue(room.localParticipant.isMicrophoneEnabled)
+    }
+
+    @Test
+    fun microphoneEnabledFlow() = runTest {
+        connect()
+
+        val signal = MutableStateFlow<Unit?>(null)
+        val job = async {
+            room.localParticipant::isMicrophoneEnabled.flow
+                .toListUntilSignal(signal)
+        }
+        room.localParticipant.publishAudioTrack(
+            track = createMockLocalAudioTrack(),
+        )
+
+        room.localParticipant.setMicrophoneEnabled(false)
+        signal.compareAndSet(null, Unit)
+        val collectedList = job.await()
+        assertEquals(3, collectedList.size)
+        assertFalse(collectedList[0])
+        assertTrue(collectedList[1])
+        assertFalse(collectedList[2])
+    }
 }
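`toListUntilSignal` is a test helper whose implementation is not part of this diff; from its usage it appears to collect the flow's emissions until the signal flow produces a non-null value. A hypothetical sketch of such a helper (name reused from the test above, behavior assumed):

    import kotlinx.coroutines.coroutineScope
    import kotlinx.coroutines.flow.Flow
    import kotlinx.coroutines.flow.filterNotNull
    import kotlinx.coroutines.flow.first
    import kotlinx.coroutines.launch

    // Hypothetical helper: collect this flow until `signal` emits a non-null value, then return the list.
    suspend fun <T> Flow<T>.toListUntilSignal(signal: Flow<Unit?>): List<T> = coroutineScope {
        val values = mutableListOf<T>()
        val collector = launch { collect { values += it } } // gather emissions in the background
        signal.filterNotNull().first()                      // suspend until the signal fires
        collector.cancel()                                  // stop collecting
        values
    }

Under that reading, the `microphoneEnabledFlow` test expects exactly the three asserted values: `false` from the eager initial state, `true` once the audio track is published, and `false` again after `setMicrophoneEnabled(false)`.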
@@ -52,6 +52,7 @@ import io.livekit.android.sample.model.StressTest
 import io.livekit.android.sample.service.ForegroundService
 import io.livekit.android.util.LKLog
 import io.livekit.android.util.flow
+import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.delay
 import kotlinx.coroutines.flow.Flow
@@ -125,14 +126,9 @@ class CallViewModel(
     private var localScreencastTrack: LocalScreencastVideoTrack? = null

     // Controls
-    private val mutableMicEnabled = MutableLiveData(true)
-    val micEnabled = mutableMicEnabled.hide()
-
-    private val mutableCameraEnabled = MutableLiveData(true)
-    val cameraEnabled = mutableCameraEnabled.hide()
-
-    private val mutableScreencastEnabled = MutableLiveData(false)
-    val screenshareEnabled = mutableScreencastEnabled.hide()
+    val micEnabled = room.localParticipant::isMicrophoneEnabled.flow
+    val cameraEnabled = room.localParticipant::isCameraEnabled.flow
+    val screenshareEnabled = room.localParticipant::isScreenShareEnabled.flow

     private val mutableEnhancedNsEnabled = MutableLiveData(false)
     val enhancedNsEnabled = mutableEnhancedNsEnabled.hide()
@@ -157,7 +153,7 @@ class CallViewModel(
             }
         }

-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.Default) {
             // Collect any errors.
             launch {
                 error.collect { Timber.e(it) }
@@ -256,10 +252,8 @@ class CallViewModel(
             // Create and publish audio/video tracks
             val localParticipant = room.localParticipant
             localParticipant.setMicrophoneEnabled(true)
-            mutableMicEnabled.postValue(localParticipant.isMicrophoneEnabled())

             localParticipant.setCameraEnabled(true)
-            mutableCameraEnabled.postValue(localParticipant.isCameraEnabled())

             // Update the speaker
             handlePrimarySpeaker(emptyList(), emptyList(), room)
@@ -310,20 +304,18 @@ class CallViewModel(
      */
     fun startScreenCapture(mediaProjectionPermissionResultData: Intent) {
         val localParticipant = room.localParticipant
-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.IO) {
             localParticipant.setScreenShareEnabled(true, ScreenCaptureParams(mediaProjectionPermissionResultData))
             val screencastTrack = localParticipant.getTrackPublication(Track.Source.SCREEN_SHARE)?.track as? LocalScreencastVideoTrack
             this@CallViewModel.localScreencastTrack = screencastTrack
-            mutableScreencastEnabled.postValue(screencastTrack?.enabled)
         }
     }

     fun stopScreenCapture() {
-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.IO) {
             localScreencastTrack?.let { localScreencastVideoTrack ->
                 localScreencastVideoTrack.stop()
                 room.localParticipant.unpublishTrack(localScreencastVideoTrack)
-                mutableScreencastEnabled.postValue(localScreencastTrack?.enabled ?: false)
             }
         }
     }
@@ -345,16 +337,14 @@ class CallViewModel(
     }

     fun setMicEnabled(enabled: Boolean) {
-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.IO) {
             room.localParticipant.setMicrophoneEnabled(enabled)
-            mutableMicEnabled.postValue(enabled)
         }
     }

     fun setCameraEnabled(enabled: Boolean) {
-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.IO) {
             room.localParticipant.setCameraEnabled(enabled)
-            mutableCameraEnabled.postValue(enabled)
         }
     }

@@ -377,7 +367,7 @@ class CallViewModel(
     }

     fun sendData(message: String) {
-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.IO) {
             room.localParticipant.publishData(message.toByteArray(Charsets.UTF_8))
         }
     }
@@ -408,13 +398,13 @@ class CallViewModel(
         Timber.e { "Reconnecting." }
         mutablePrimarySpeaker.value = null
         room.disconnect()
-        viewModelScope.launch {
+        viewModelScope.launch(Dispatchers.IO) {
             connectToRoom()
         }
     }

     private suspend fun StressTest.SwitchRoom.execute() = coroutineScope {
-        launch {
+        launch(Dispatchers.Default) {
             while (isActive) {
                 delay(2000)
                 dumpReferenceTables()
@@ -423,12 +413,12 @@ class CallViewModel(

         while (isActive) {
             Timber.d { "Stress test -> connect to first room" }
-            launch { quickConnectToRoom(firstToken) }
+            launch(Dispatchers.IO) { quickConnectToRoom(firstToken) }
             delay(200)
             room.disconnect()
             delay(50)
             Timber.d { "Stress test -> connect to second room" }
-            launch { quickConnectToRoom(secondToken) }
+            launch(Dispatchers.IO) { quickConnectToRoom(secondToken) }
             delay(200)
             room.disconnect()
             delay(50)
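With the LiveData mirrors removed, the view model now re-exposes the participant's flows directly, so the manual `postValue` bookkeeping disappears and the controls can no longer drift out of sync with SDK state. If a caller preferred a `StateFlow` scoped to the view model with an explicit initial value, it could wrap the same source (an alternative sketch, not what the sample does):

    // Hypothetical alternative: expose a StateFlow instead of the raw flow reference.
    val micEnabledState: StateFlow<Boolean> =
        room.localParticipant::isMicrophoneEnabled.flow
            .stateIn(viewModelScope, SharingStarted.WhileSubscribed(5_000), initialValue = false)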
@@ -1,5 +1,5 @@
 /*
- * Copyright 2023-2024 LiveKit, Inc.
+ * Copyright 2023-2025 LiveKit, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -53,7 +53,6 @@ import androidx.compose.material.rememberScaffoldState
 import androidx.compose.runtime.Composable
 import androidx.compose.runtime.collectAsState
 import androidx.compose.runtime.getValue
-import androidx.compose.runtime.livedata.observeAsState
 import androidx.compose.runtime.mutableStateOf
 import androidx.compose.runtime.remember
 import androidx.compose.runtime.rememberCoroutineScope
@@ -118,9 +117,9 @@ class CallActivity : AppCompatActivity() {
             val participants by viewModel.participants.collectAsState(initial = emptyList())
             val primarySpeaker by viewModel.primarySpeaker.collectAsState()
             val activeSpeakers by viewModel.activeSpeakers.collectAsState(initial = emptyList())
-            val micEnabled by viewModel.micEnabled.observeAsState(true)
-            val videoEnabled by viewModel.cameraEnabled.observeAsState(true)
-            val screencastEnabled by viewModel.screenshareEnabled.observeAsState(false)
+            val micEnabled by viewModel.micEnabled.collectAsState(true)
+            val videoEnabled by viewModel.cameraEnabled.collectAsState(true)
+            val screencastEnabled by viewModel.screenshareEnabled.collectAsState(false)
             val permissionAllowed by viewModel.permissionAllowed.collectAsState()
             Content(
                 room,
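Because the exposed flows are plain `Flow<Boolean>`, `collectAsState` needs an initial value, and the sample seeds `true`/`false` to match the pre-connection defaults. A variation that seeds from the participant's current property value would also work (illustrative only, not what the sample does):

    // Illustrative variation: seed Compose state from the current property value.
    val micEnabled by viewModel.micEnabled.collectAsState(
        initial = viewModel.room.localParticipant.isMicrophoneEnabled,
    )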
@@ -1,5 +1,5 @@
 /*
- * Copyright 2023-2024 LiveKit, Inc.
+ * Copyright 2023-2025 LiveKit, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -53,7 +53,8 @@ fun ParticipantItem(
     val audioTracks by participant::audioTrackPublications.flow.collectAsState()
     val identityBarPadding = 4.dp
     ConstraintLayout(
-        modifier = modifier.background(NoVideoBackground)
+        modifier = modifier
+            .background(NoVideoBackground)
             .run {
                 if (isSpeaking) {
                     border(2.dp, BlueMain)
@@ -101,9 +102,9 @@ fun ParticipantItem(
         },
     )

-    val isMuted = audioTracks.none { (pub) -> pub.track != null && !pub.muted }
+    val isMicEnabled by participant::isMicrophoneEnabled.flow.collectAsState()

-    if (isMuted) {
+    if (!isMicEnabled) {
         Icon(
             painter = painterResource(id = R.drawable.outline_mic_off_24),
             contentDescription = "",
@@ -1,5 +1,5 @@
 /*
- * Copyright 2023-2024 LiveKit, Inc.
+ * Copyright 2023-2025 LiveKit, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -26,7 +26,9 @@ import android.widget.Toast
 import androidx.activity.result.contract.ActivityResultContracts
 import androidx.appcompat.app.AlertDialog
 import androidx.appcompat.app.AppCompatActivity
+import androidx.lifecycle.Lifecycle
 import androidx.lifecycle.lifecycleScope
+import androidx.lifecycle.repeatOnLifecycle
 import androidx.recyclerview.widget.LinearLayoutManager
 import com.xwray.groupie.GroupieAdapter
 import io.livekit.android.sample.common.R
@@ -36,6 +38,7 @@ import io.livekit.android.sample.dialog.showDebugMenuDialog
 import io.livekit.android.sample.dialog.showSelectAudioDeviceDialog
 import io.livekit.android.sample.model.StressTest
 import kotlinx.coroutines.flow.collectLatest
+import kotlinx.coroutines.launch
 import kotlinx.parcelize.Parcelize

 class CallActivity : AppCompatActivity() {
@@ -81,12 +84,14 @@ class CallActivity : AppCompatActivity() {
             adapter = audienceAdapter
         }

-        lifecycleScope.launchWhenCreated {
-            viewModel.participants
-                .collect { participants ->
-                    val items = participants.map { participant -> ParticipantItem(viewModel.room, participant) }
-                    audienceAdapter.update(items)
-                }
+        lifecycleScope.launch {
+            repeatOnLifecycle(Lifecycle.State.CREATED) {
+                viewModel.participants
+                    .collect { participants ->
+                        val items = participants.map { participant -> ParticipantItem(viewModel.room, participant) }
+                        audienceAdapter.update(items)
+                    }
+            }
         }

         // speaker view setup
@@ -95,53 +100,69 @@ class CallActivity : AppCompatActivity() {
             layoutManager = LinearLayoutManager(this@CallActivity, LinearLayoutManager.HORIZONTAL, false)
             adapter = speakerAdapter
         }
-        lifecycleScope.launchWhenCreated {
-            viewModel.primarySpeaker.collectLatest { speaker ->
-                val items = listOfNotNull(speaker)
-                    .map { participant -> ParticipantItem(viewModel.room, participant, speakerView = true) }
-                speakerAdapter.update(items)
+        lifecycleScope.launch {
+            repeatOnLifecycle(Lifecycle.State.CREATED) {
+                viewModel.primarySpeaker.collectLatest { speaker ->
+                    val items = listOfNotNull(speaker)
+                        .map { participant -> ParticipantItem(viewModel.room, participant, speakerView = true) }
+                    speakerAdapter.update(items)
+                }
             }
         }

         // Controls setup
-        viewModel.cameraEnabled.observe(this) { enabled ->
-            binding.camera.setOnClickListener { viewModel.setCameraEnabled(!enabled) }
-            binding.camera.setImageResource(
-                if (enabled) {
-                    R.drawable.outline_videocam_24
-                } else {
-                    R.drawable.outline_videocam_off_24
-                },
-            )
-            binding.flipCamera.isEnabled = enabled
-        }
-        viewModel.micEnabled.observe(this) { enabled ->
-            binding.mic.setOnClickListener { viewModel.setMicEnabled(!enabled) }
-            binding.mic.setImageResource(
-                if (enabled) {
-                    R.drawable.outline_mic_24
-                } else {
-                    R.drawable.outline_mic_off_24
-                },
-            )
+        lifecycleScope.launch {
+            repeatOnLifecycle(Lifecycle.State.CREATED) {
+                viewModel.cameraEnabled.collect { enabled ->
+                    binding.camera.setOnClickListener { viewModel.setCameraEnabled(!enabled) }
+                    binding.camera.setImageResource(
+                        if (enabled) {
+                            R.drawable.outline_videocam_24
+                        } else {
+                            R.drawable.outline_videocam_off_24
+                        },
+                    )
+                    binding.flipCamera.isEnabled = enabled
+                }
+            }
+        }
+
+        lifecycleScope.launch {
+            repeatOnLifecycle(Lifecycle.State.CREATED) {
+                viewModel.micEnabled.collect { enabled ->
+                    binding.mic.setOnClickListener { viewModel.setMicEnabled(!enabled) }
+                    binding.mic.setImageResource(
+                        if (enabled) {
+                            R.drawable.outline_mic_24
+                        } else {
+                            R.drawable.outline_mic_off_24
+                        },
+                    )
+                }
+            }
         }

         binding.flipCamera.setOnClickListener { viewModel.flipCamera() }
-        viewModel.screenshareEnabled.observe(this) { enabled ->
-            binding.screenShare.setOnClickListener {
-                if (enabled) {
-                    viewModel.stopScreenCapture()
-                } else {
-                    requestMediaProjection()
+
+        lifecycleScope.launch {
+            repeatOnLifecycle(Lifecycle.State.CREATED) {
+                viewModel.screenshareEnabled.collect { enabled ->
+                    binding.screenShare.setOnClickListener {
+                        if (enabled) {
+                            viewModel.stopScreenCapture()
+                        } else {
+                            requestMediaProjection()
+                        }
+                    }
+                    binding.screenShare.setImageResource(
+                        if (enabled) {
+                            R.drawable.baseline_cast_connected_24
+                        } else {
+                            R.drawable.baseline_cast_24
+                        },
+                    )
                 }
             }
-            binding.screenShare.setImageResource(
-                if (enabled) {
-                    R.drawable.baseline_cast_connected_24
-                } else {
-                    R.drawable.baseline_cast_24
-                },
-            )
         }

         binding.message.setOnClickListener {
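The same `lifecycleScope.launch` + `repeatOnLifecycle(CREATED)` + `collect` wrapper now appears once per control; a small extension could factor out that repetition (a refactoring sketch, not part of the PR):

    // Hypothetical helper to reduce the repeated lifecycle-aware collection boilerplate.
    fun <T> AppCompatActivity.collectOnCreated(source: Flow<T>, action: suspend (T) -> Unit) {
        lifecycleScope.launch {
            repeatOnLifecycle(Lifecycle.State.CREATED) {
                source.collect(action)
            }
        }
    }

    // Usage: collectOnCreated(viewModel.micEnabled) { enabled -> /* update the mic button */ }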
@@ -1,5 +1,5 @@
 /*
- * Copyright 2024 LiveKit, Inc.
+ * Copyright 2024-2025 LiveKit, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -82,17 +82,9 @@ class ParticipantItem(
             }
         }
         coroutineScope?.launch {
-            participant::audioTrackPublications.flow
-                .flatMapLatest { tracks ->
-                    val audioTrack = tracks.firstOrNull()?.first
-                    if (audioTrack != null) {
-                        audioTrack::muted.flow
-                    } else {
-                        flowOf(true)
-                    }
-                }
-                .collect { muted ->
-                    viewBinding.muteIndicator.visibility = if (muted) View.VISIBLE else View.INVISIBLE
+            participant::isMicrophoneEnabled.flow
+                .collect { isMicEnabled ->
+                    viewBinding.muteIndicator.visibility = if (!isMicEnabled) View.VISIBLE else View.INVISIBLE
                 }
         }
         coroutineScope?.launch {