davidliu
Committed by GitHub

FlowObservable isTrackEnabled variables (#685)

* Change isMicrophoneEnabled, isCameraEnabled, isScreenshareEnabled to FlowObservable variables

* Compile fixes

* spotless
---
"client-sdk-android": minor
---
Change isMicrophoneEnabled, isCameraEnabled, isScreenshareEnabled to FlowObservable variables
... ...
... ... @@ -62,6 +62,7 @@ import kotlinx.coroutines.Job
import kotlinx.coroutines.async
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.launch
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlinx.coroutines.sync.Mutex
... ...
... ... @@ -32,6 +32,7 @@ import io.livekit.android.util.flow
import io.livekit.android.util.flowDelegate
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.cancel
import kotlinx.coroutines.flow.Flow
... ... @@ -270,6 +271,7 @@ open class Participant(
var trackPublications by flowDelegate(emptyMap<String, TrackPublication>())
protected set
@OptIn(ExperimentalCoroutinesApi::class)
private fun Flow<Map<String, TrackPublication>>.trackUpdateFlow(): Flow<List<Pair<TrackPublication, Track?>>> {
return flatMapLatest { videoTracks ->
if (videoTracks.isEmpty()) {
... ... @@ -365,23 +367,43 @@ open class Participant(
return null
}
// True when this participant's CAMERA-source publication exists and is not muted.
fun isCameraEnabled(): Boolean =
    isTrackPublicationEnabled(getTrackPublication(Track.Source.CAMERA))
/**
 * Whether this participant's microphone is enabled: a MICROPHONE-source
 * publication exists, is not muted, and has a live track attached.
 *
 * Backed by a StateFlow started eagerly in [delegateScope] with an initial
 * value of `false`, so it can be observed via `::isMicrophoneEnabled.flow`.
 */
@FlowObservable
@get:FlowObservable
val isMicrophoneEnabled by flowDelegate(
// Pick the MICROPHONE-source publication out of the audio publications,
// falling back to (null, null) while nothing is published yet.
stateFlow = ::audioTrackPublications.flow
.map { it.firstOrNull { (pub, _) -> pub.source == Track.Source.MICROPHONE } ?: (null to null) }
.isTrackEnabledDetector()
.stateIn(delegateScope, SharingStarted.Eagerly, false),
)
// True when this participant's MICROPHONE-source publication exists and is not muted.
fun isMicrophoneEnabled(): Boolean =
    isTrackPublicationEnabled(getTrackPublication(Track.Source.MICROPHONE))
/**
 * Whether this participant's camera is enabled: a CAMERA-source publication
 * exists, is not muted, and has a live track attached.
 *
 * Backed by a StateFlow started eagerly in [delegateScope] with an initial
 * value of `false`, so it can be observed via `::isCameraEnabled.flow`.
 */
@FlowObservable
@get:FlowObservable
val isCameraEnabled by flowDelegate(
// Pick the CAMERA-source publication out of the video publications,
// falling back to (null, null) while nothing is published yet.
stateFlow = ::videoTrackPublications.flow
.map { it.firstOrNull { (pub, _) -> pub.source == Track.Source.CAMERA } ?: (null to null) }
.isTrackEnabledDetector()
.stateIn(delegateScope, SharingStarted.Eagerly, false),
)
// True when this participant's SCREEN_SHARE-source publication exists and is not muted.
fun isScreenShareEnabled(): Boolean =
    isTrackPublicationEnabled(getTrackPublication(Track.Source.SCREEN_SHARE))
/**
 * Whether this participant's screen share is enabled: a SCREEN_SHARE-source
 * publication exists, is not muted, and has a live track attached.
 *
 * Backed by a StateFlow started eagerly in [delegateScope] with an initial
 * value of `false`, so it can be observed via `::isScreenShareEnabled.flow`.
 */
@FlowObservable
@get:FlowObservable
val isScreenShareEnabled by flowDelegate(
// Pick the SCREEN_SHARE-source publication out of the video publications,
// falling back to (null, null) while nothing is published yet.
stateFlow = ::videoTrackPublications.flow
.map { it.firstOrNull { (pub, _) -> pub.source == Track.Source.SCREEN_SHARE } ?: (null to null) }
.isTrackEnabledDetector()
.stateIn(delegateScope, SharingStarted.Eagerly, false),
)
private fun isTrackPublicationEnabled(pub: TrackPublication?): Boolean {
return !(pub?.muted ?: true)
/**
 * Maps a flow of (publication, track) pairs to a flow of "enabled" booleans.
 *
 * A source counts as enabled only when its publication is present and not
 * muted AND a non-null track is attached. Re-subscribes to the new
 * publication's muted state whenever the upstream pair changes.
 */
@OptIn(ExperimentalCoroutinesApi::class)
private fun Flow<Pair<TrackPublication?, Track?>>.isTrackEnabledDetector(): Flow<Boolean> {
return this.flatMapLatest { (pub, track) ->
if (pub == null) {
// NOTE(review): with pub == null and a non-null track this would yield true
// after the final map — upstream always pairs (null to null) so it is false
// in practice, but confirm if new call sites are added.
flowOf(false to track)
} else {
// Follow the publication's muted state reactively.
pub::muted.flow
.map { muted -> muted to track }
}
}.map { (muted, track) -> (!muted && track != null) }
}
/**
... ...
... ... @@ -32,6 +32,7 @@ import io.livekit.android.room.track.VideoCaptureParameter
import io.livekit.android.room.track.VideoCodec
import io.livekit.android.test.MockE2ETest
import io.livekit.android.test.assert.assertIsClassList
import io.livekit.android.test.coroutines.toListUntilSignal
import io.livekit.android.test.events.EventCollector
import io.livekit.android.test.mock.MockAudioProcessingController
import io.livekit.android.test.mock.MockEglBase
... ... @@ -41,11 +42,14 @@ import io.livekit.android.test.mock.TestData
import io.livekit.android.test.mock.camera.MockCameraProvider
import io.livekit.android.test.mock.room.track.createMockLocalAudioTrack
import io.livekit.android.test.util.toPBByteString
import io.livekit.android.util.flow
import io.livekit.android.util.toOkioByteString
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.Job
import kotlinx.coroutines.async
import kotlinx.coroutines.cancel
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.launch
import kotlinx.coroutines.test.StandardTestDispatcher
import kotlinx.coroutines.test.advanceUntilIdle
... ... @@ -608,4 +612,39 @@ class LocalParticipantMockE2ETest : MockE2ETest() {
coroutineRule.dispatcher.scheduler.advanceUntilIdle()
assertTrue(!didThrow && success == false)
}
// Verifies that publishing an (unmuted) audio track flips the observable
// isMicrophoneEnabled property to true once pending coroutines settle.
@Test
fun isMicrophoneEnabled() = runTest {
connect()
room.localParticipant.publishAudioTrack(
track = createMockLocalAudioTrack(),
)
// Let the eager StateFlow behind isMicrophoneEnabled process the publication.
advanceUntilIdle()
assertTrue(room.localParticipant.isMicrophoneEnabled)
}
// Verifies the emission sequence of ::isMicrophoneEnabled.flow:
// initial false -> true after publishing -> false after disabling the mic.
@Test
fun microphoneEnabledFlow() = runTest {
connect()
// Signal used to stop collection; setting it non-null ends toListUntilSignal.
val signal = MutableStateFlow<Unit?>(null)
val job = async {
room.localParticipant::isMicrophoneEnabled.flow
.toListUntilSignal(signal)
}
room.localParticipant.publishAudioTrack(
track = createMockLocalAudioTrack(),
)
room.localParticipant.setMicrophoneEnabled(false)
// Stop collecting and gather the emitted values.
signal.compareAndSet(null, Unit)
val collectedList = job.await()
assertEquals(3, collectedList.size)
assertFalse(collectedList[0]) // initial value before any track is published
assertTrue(collectedList[1]) // after publishAudioTrack
assertFalse(collectedList[2]) // after setMicrophoneEnabled(false)
}
}
... ...
... ... @@ -52,6 +52,7 @@ import io.livekit.android.sample.model.StressTest
import io.livekit.android.sample.service.ForegroundService
import io.livekit.android.util.LKLog
import io.livekit.android.util.flow
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.Flow
... ... @@ -125,14 +126,9 @@ class CallViewModel(
private var localScreencastTrack: LocalScreencastVideoTrack? = null
// Controls
private val mutableMicEnabled = MutableLiveData(true)
val micEnabled = mutableMicEnabled.hide()
private val mutableCameraEnabled = MutableLiveData(true)
val cameraEnabled = mutableCameraEnabled.hide()
private val mutableScreencastEnabled = MutableLiveData(false)
val screenshareEnabled = mutableScreencastEnabled.hide()
val micEnabled = room.localParticipant::isMicrophoneEnabled.flow
val cameraEnabled = room.localParticipant::isCameraEnabled.flow
val screenshareEnabled = room.localParticipant::isScreenShareEnabled.flow
private val mutableEnhancedNsEnabled = MutableLiveData(false)
val enhancedNsEnabled = mutableEnhancedNsEnabled.hide()
... ... @@ -157,7 +153,7 @@ class CallViewModel(
}
}
viewModelScope.launch {
viewModelScope.launch(Dispatchers.Default) {
// Collect any errors.
launch {
error.collect { Timber.e(it) }
... ... @@ -256,10 +252,8 @@ class CallViewModel(
// Create and publish audio/video tracks
val localParticipant = room.localParticipant
localParticipant.setMicrophoneEnabled(true)
mutableMicEnabled.postValue(localParticipant.isMicrophoneEnabled())
localParticipant.setCameraEnabled(true)
mutableCameraEnabled.postValue(localParticipant.isCameraEnabled())
// Update the speaker
handlePrimarySpeaker(emptyList(), emptyList(), room)
... ... @@ -310,20 +304,18 @@ class CallViewModel(
*/
/**
 * Starts a screen capture using the result of a MediaProjection permission request.
 *
 * @param mediaProjectionPermissionResultData the data [Intent] returned by the
 *   MediaProjection permission activity result.
 */
fun startScreenCapture(mediaProjectionPermissionResultData: Intent) {
    val localParticipant = room.localParticipant
    viewModelScope.launch(Dispatchers.IO) {
        localParticipant.setScreenShareEnabled(true, ScreenCaptureParams(mediaProjectionPermissionResultData))

        // Keep a reference for stopScreenCapture(). Enabled state is now observed
        // via localParticipant::isScreenShareEnabled.flow rather than LiveData.
        this@CallViewModel.localScreencastTrack =
            localParticipant.getTrackPublication(Track.Source.SCREEN_SHARE)?.track as? LocalScreencastVideoTrack
    }
}
/**
 * Stops an active screen capture, stopping and unpublishing the screencast
 * track if one exists. No-op when no screencast track is active.
 */
fun stopScreenCapture() {
    viewModelScope.launch(Dispatchers.IO) {
        localScreencastTrack?.let { localScreencastVideoTrack ->
            localScreencastVideoTrack.stop()
            room.localParticipant.unpublishTrack(localScreencastVideoTrack)
        }
    }
}
... ... @@ -345,16 +337,14 @@ class CallViewModel(
}
/**
 * Enables or disables the local participant's microphone.
 * Observers should watch room.localParticipant::isMicrophoneEnabled.flow
 * for the resulting state change.
 */
fun setMicEnabled(enabled: Boolean) {
    viewModelScope.launch(Dispatchers.IO) {
        room.localParticipant.setMicrophoneEnabled(enabled)
    }
}
/**
 * Enables or disables the local participant's camera.
 * Observers should watch room.localParticipant::isCameraEnabled.flow
 * for the resulting state change.
 */
fun setCameraEnabled(enabled: Boolean) {
    viewModelScope.launch(Dispatchers.IO) {
        room.localParticipant.setCameraEnabled(enabled)
    }
}
... ... @@ -377,7 +367,7 @@ class CallViewModel(
}
/**
 * Publishes [message] to the room as a UTF-8 encoded data payload.
 */
fun sendData(message: String) {
    viewModelScope.launch(Dispatchers.IO) {
        room.localParticipant.publishData(message.toByteArray(Charsets.UTF_8))
    }
}
... ... @@ -408,13 +398,13 @@ class CallViewModel(
Timber.e { "Reconnecting." }
mutablePrimarySpeaker.value = null
room.disconnect()
viewModelScope.launch {
viewModelScope.launch(Dispatchers.IO) {
connectToRoom()
}
}
private suspend fun StressTest.SwitchRoom.execute() = coroutineScope {
launch {
launch(Dispatchers.Default) {
while (isActive) {
delay(2000)
dumpReferenceTables()
... ... @@ -423,12 +413,12 @@ class CallViewModel(
while (isActive) {
Timber.d { "Stress test -> connect to first room" }
launch { quickConnectToRoom(firstToken) }
launch(Dispatchers.IO) { quickConnectToRoom(firstToken) }
delay(200)
room.disconnect()
delay(50)
Timber.d { "Stress test -> connect to second room" }
launch { quickConnectToRoom(secondToken) }
launch(Dispatchers.IO) { quickConnectToRoom(secondToken) }
delay(200)
room.disconnect()
delay(50)
... ...
/*
* Copyright 2023-2024 LiveKit, Inc.
* Copyright 2023-2025 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
... ... @@ -53,7 +53,6 @@ import androidx.compose.material.rememberScaffoldState
import androidx.compose.runtime.Composable
import androidx.compose.runtime.collectAsState
import androidx.compose.runtime.getValue
import androidx.compose.runtime.livedata.observeAsState
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.rememberCoroutineScope
... ... @@ -118,9 +117,9 @@ class CallActivity : AppCompatActivity() {
val participants by viewModel.participants.collectAsState(initial = emptyList())
val primarySpeaker by viewModel.primarySpeaker.collectAsState()
val activeSpeakers by viewModel.activeSpeakers.collectAsState(initial = emptyList())
val micEnabled by viewModel.micEnabled.observeAsState(true)
val videoEnabled by viewModel.cameraEnabled.observeAsState(true)
val screencastEnabled by viewModel.screenshareEnabled.observeAsState(false)
val micEnabled by viewModel.micEnabled.collectAsState(true)
val videoEnabled by viewModel.cameraEnabled.collectAsState(true)
val screencastEnabled by viewModel.screenshareEnabled.collectAsState(false)
val permissionAllowed by viewModel.permissionAllowed.collectAsState()
Content(
room,
... ...
/*
* Copyright 2023-2024 LiveKit, Inc.
* Copyright 2023-2025 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
... ... @@ -53,7 +53,8 @@ fun ParticipantItem(
val audioTracks by participant::audioTrackPublications.flow.collectAsState()
val identityBarPadding = 4.dp
ConstraintLayout(
modifier = modifier.background(NoVideoBackground)
modifier = modifier
.background(NoVideoBackground)
.run {
if (isSpeaking) {
border(2.dp, BlueMain)
... ... @@ -101,9 +102,9 @@ fun ParticipantItem(
},
)
val isMuted = audioTracks.none { (pub) -> pub.track != null && !pub.muted }
val isMicEnabled by participant::isMicrophoneEnabled.flow.collectAsState()
if (isMuted) {
if (!isMicEnabled) {
Icon(
painter = painterResource(id = R.drawable.outline_mic_off_24),
contentDescription = "",
... ...
/*
* Copyright 2023-2024 LiveKit, Inc.
* Copyright 2023-2025 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
... ... @@ -26,7 +26,9 @@ import android.widget.Toast
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import androidx.lifecycle.Lifecycle
import androidx.lifecycle.lifecycleScope
import androidx.lifecycle.repeatOnLifecycle
import androidx.recyclerview.widget.LinearLayoutManager
import com.xwray.groupie.GroupieAdapter
import io.livekit.android.sample.common.R
... ... @@ -36,6 +38,7 @@ import io.livekit.android.sample.dialog.showDebugMenuDialog
import io.livekit.android.sample.dialog.showSelectAudioDeviceDialog
import io.livekit.android.sample.model.StressTest
import kotlinx.coroutines.flow.collectLatest
import kotlinx.coroutines.launch
import kotlinx.parcelize.Parcelize
class CallActivity : AppCompatActivity() {
... ... @@ -81,13 +84,15 @@ class CallActivity : AppCompatActivity() {
adapter = audienceAdapter
}
lifecycleScope.launchWhenCreated {
lifecycleScope.launch {
repeatOnLifecycle(Lifecycle.State.CREATED) {
viewModel.participants
.collect { participants ->
val items = participants.map { participant -> ParticipantItem(viewModel.room, participant) }
audienceAdapter.update(items)
}
}
}
// speaker view setup
val speakerAdapter = GroupieAdapter()
... ... @@ -95,16 +100,20 @@ class CallActivity : AppCompatActivity() {
layoutManager = LinearLayoutManager(this@CallActivity, LinearLayoutManager.HORIZONTAL, false)
adapter = speakerAdapter
}
lifecycleScope.launchWhenCreated {
lifecycleScope.launch {
repeatOnLifecycle(Lifecycle.State.CREATED) {
viewModel.primarySpeaker.collectLatest { speaker ->
val items = listOfNotNull(speaker)
.map { participant -> ParticipantItem(viewModel.room, participant, speakerView = true) }
speakerAdapter.update(items)
}
}
}
// Controls setup
viewModel.cameraEnabled.observe(this) { enabled ->
lifecycleScope.launch {
repeatOnLifecycle(Lifecycle.State.CREATED) {
viewModel.cameraEnabled.collect { enabled ->
binding.camera.setOnClickListener { viewModel.setCameraEnabled(!enabled) }
binding.camera.setImageResource(
if (enabled) {
... ... @@ -115,7 +124,12 @@ class CallActivity : AppCompatActivity() {
)
binding.flipCamera.isEnabled = enabled
}
viewModel.micEnabled.observe(this) { enabled ->
}
}
lifecycleScope.launch {
repeatOnLifecycle(Lifecycle.State.CREATED) {
viewModel.micEnabled.collect { enabled ->
binding.mic.setOnClickListener { viewModel.setMicEnabled(!enabled) }
binding.mic.setImageResource(
if (enabled) {
... ... @@ -125,9 +139,14 @@ class CallActivity : AppCompatActivity() {
},
)
}
}
}
binding.flipCamera.setOnClickListener { viewModel.flipCamera() }
viewModel.screenshareEnabled.observe(this) { enabled ->
lifecycleScope.launch {
repeatOnLifecycle(Lifecycle.State.CREATED) {
viewModel.screenshareEnabled.collect { enabled ->
binding.screenShare.setOnClickListener {
if (enabled) {
viewModel.stopScreenCapture()
... ... @@ -143,6 +162,8 @@ class CallActivity : AppCompatActivity() {
},
)
}
}
}
binding.message.setOnClickListener {
val editText = EditText(this)
... ...
/*
* Copyright 2024 LiveKit, Inc.
* Copyright 2024-2025 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
... ... @@ -82,17 +82,9 @@ class ParticipantItem(
}
}
coroutineScope?.launch {
    // Drive the mute indicator from the participant's observable mic state.
    participant::isMicrophoneEnabled.flow
        .collect { isMicEnabled ->
            // Show the indicator only when the microphone is NOT enabled.
            // (Previous behavior showed it while the audio track was muted;
            // the Compose ParticipantItem uses the same !isMicEnabled check.)
            viewBinding.muteIndicator.visibility = if (!isMicEnabled) View.VISIBLE else View.INVISIBLE
        }
}
coroutineScope?.launch {
... ...