David Liu

Fix sample app not handling tracks that are subscribed late

package io.livekit.android.room.track
import io.livekit.android.room.participant.Participant
import io.livekit.android.util.flowDelegate
import livekit.LivekitModels
import java.lang.ref.WeakReference
... ... @@ -9,7 +10,7 @@ open class TrackPublication(
track: Track?,
participant: Participant
) {
open var track: Track? = track
open var track: Track? by flowDelegate(track)
internal set
var name: String
internal set
... ...
... ... @@ -32,9 +32,15 @@ android {
dependencies {
implementation 'androidx.core:core-ktx:1.7.0'
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'com.google.android.material:material:1.4.0'
api "androidx.core:core-ktx:${versions.androidx_core}"
api 'androidx.appcompat:appcompat:1.4.0'
api 'com.google.android.material:material:1.4.0'
api deps.kotlinx_coroutines
api deps.timber
api "androidx.lifecycle:lifecycle-runtime-ktx:${versions.androidx_lifecycle}"
api "androidx.lifecycle:lifecycle-viewmodel-ktx:${versions.androidx_lifecycle}"
api "androidx.lifecycle:lifecycle-common-java8:${versions.androidx_lifecycle}"
api project(":livekit-android-sdk")
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
... ...
package io.livekit.android.composesample
package io.livekit.android.sample
import android.app.Application
import android.content.Intent
... ... @@ -62,7 +62,7 @@ class CallViewModel(
val flipButtonVideoEnabled = mutableFlipVideoButtonEnabled.hide()
private val mutableScreencastEnabled = MutableLiveData(false)
val screencastEnabled = mutableScreencastEnabled.hide()
val screenshareEnabled = mutableScreencastEnabled.hide()
init {
viewModelScope.launch {
... ... @@ -152,7 +152,7 @@ class CallViewModel(
}
}
fun flipVideo() {
fun flipCamera() {
room.value?.localParticipant?.let { participant ->
val videoTrack = participant.getTrackPublication(Track.Source.CAMERA)
?.track as? LocalVideoTrack
... ...
... ... @@ -27,6 +27,7 @@ import com.google.accompanist.pager.ExperimentalPagerApi
import io.livekit.android.composesample.ui.theme.AppTheme
import io.livekit.android.room.Room
import io.livekit.android.room.participant.Participant
import io.livekit.android.sample.CallViewModel
import kotlinx.coroutines.Dispatchers
import kotlinx.parcelize.Parcelize
... ... @@ -87,7 +88,7 @@ class CallActivity : AppCompatActivity() {
val micEnabled by viewModel.micEnabled.observeAsState(true)
val videoEnabled by viewModel.cameraEnabled.observeAsState(true)
val flipButtonEnabled by viewModel.flipButtonVideoEnabled.observeAsState(true)
val screencastEnabled by viewModel.screencastEnabled.observeAsState(false)
val screencastEnabled by viewModel.screenshareEnabled.observeAsState(false)
Content(
room,
participants,
... ... @@ -209,7 +210,7 @@ class CallActivity : AppCompatActivity() {
)
}
Surface(
onClick = { viewModel.flipVideo() },
onClick = { viewModel.flipCamera() },
) {
Icon(
painterResource(id = R.drawable.outline_flip_camera_android_24),
... ...
... ... @@ -37,7 +37,7 @@ dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation deps.kotlinx_coroutines
implementation 'com.google.android.material:material:1.4.0'
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'androidx.appcompat:appcompat:1.4.0'
implementation "androidx.core:core-ktx:${versions.androidx_core}"
implementation "androidx.activity:activity-ktx:1.4.0"
implementation 'androidx.fragment:fragment-ktx:1.3.6'
... ... @@ -52,7 +52,6 @@ dependencies {
implementation 'com.snakydesign.livedataextensions:lives:1.3.0'
implementation deps.timber
implementation project(":sample-app-common")
implementation project(":livekit-android-sdk")
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
... ...
... ... @@ -8,15 +8,15 @@ import android.os.Parcelable
import android.view.View
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.lifecycle.lifecycleScope
import androidx.recyclerview.widget.LinearLayoutManager
import com.github.ajalt.timberkt.Timber
import com.snakydesign.livedataextensions.combineLatest
import com.snakydesign.livedataextensions.scan
import com.snakydesign.livedataextensions.take
import com.xwray.groupie.GroupieAdapter
import io.livekit.android.room.participant.Participant
import io.livekit.android.room.track.Track
import io.livekit.android.room.track.VideoTrack
import io.livekit.android.sample.databinding.CallActivityBinding
import io.livekit.android.util.flow
import kotlinx.coroutines.flow.*
import kotlinx.parcelize.Parcelize
class CallActivity : AppCompatActivity() {
... ... @@ -41,7 +41,7 @@ class CallActivity : AppCompatActivity() {
if (resultCode != Activity.RESULT_OK || data == null) {
return@registerForActivityResult
}
viewModel.setScreenshare(true, data)
viewModel.startScreenCapture(data)
}
override fun onCreate(savedInstanceState: Bundle?) {
... ... @@ -60,48 +60,67 @@ class CallActivity : AppCompatActivity() {
this.adapter = adapter
}
combineLatest(
viewModel.room,
viewModel.participants
) { room, participants -> room to participants }
.observe(this) {
val (room, participants) = it
val items = participants.map { participant -> ParticipantItem(room, participant) }
adapter.update(items)
}
lifecycleScope.launchWhenCreated {
viewModel.room
.combine(viewModel.participants) { room, participants -> room to participants }
.collect { (room, participants) ->
if (room != null) {
val items = participants.map { participant -> ParticipantItem(room, participant) }
adapter.update(items)
}
}
}
// speaker view setup
viewModel.room.take(1).observe(this) { room ->
room.initVideoRenderer(binding.speakerVideoView)
viewModel.activeSpeaker
.scan(Pair<Participant?, Participant?>(null, null)) { pair, participant ->
// old participant is first
// latest active participant is second
Pair(pair.second, participant)
}.observe(this) { (oldSpeaker, newSpeaker) ->
// Remove any rendering from the old speaker
oldSpeaker?.videoTracks
?.values
?.forEach { trackPublication ->
(trackPublication.track as? VideoTrack)?.removeRenderer(binding.speakerVideoView)
}
binding.identityText.text = newSpeaker?.identity
val videoTrack = newSpeaker?.videoTracks?.values
?.firstOrNull()
?.track as? VideoTrack
if (videoTrack != null) {
lifecycleScope.launchWhenCreated {
viewModel.room.filterNotNull().take(1)
.transform { room ->
// Initialize video renderer
room.initVideoRenderer(binding.speakerVideoView)
// Observe primary speaker changes
emitAll(viewModel.primarySpeaker)
}.flatMapLatest { primarySpeaker ->
// Update new primary speaker identity
binding.identityText.text = primarySpeaker?.identity
// observe videoTracks changes.
if (primarySpeaker != null) {
primarySpeaker::videoTracks.flow
.map { primarySpeaker to it }
} else {
emptyFlow()
}
}.flatMapLatest { (participant, videoTracks) ->
for (videoTrack in videoTracks.values) {
Timber.e { "videoTrack is ${videoTrack.track}" }
}
// Prioritize any screenshare streams.
val trackPublication = participant.getTrackPublication(Track.Source.SCREEN_SHARE)
?: participant.getTrackPublication(Track.Source.CAMERA)
?: videoTracks.values.firstOrNull()
?: return@flatMapLatest emptyFlow()
trackPublication::track.flow
}.collect { videoTrack ->
// Cleanup old video track
val oldVideoTrack = binding.speakerVideoView.tag as? VideoTrack
oldVideoTrack?.removeRenderer(binding.speakerVideoView)
// Bind new video track to video view.
if (videoTrack is VideoTrack) {
videoTrack.addRenderer(binding.speakerVideoView)
binding.speakerVideoView.visibility = View.VISIBLE
} else {
binding.speakerVideoView.visibility = View.INVISIBLE
}
binding.speakerVideoView.tag = videoTrack
}
}
// Controls setup
viewModel.videoEnabled.observe(this) { enabled ->
viewModel.cameraEnabled.observe(this) { enabled ->
binding.camera.setOnClickListener { viewModel.setCameraEnabled(!enabled) }
binding.camera.setImageResource(
if (enabled) R.drawable.outline_videocam_24
... ... @@ -121,7 +140,7 @@ class CallActivity : AppCompatActivity() {
viewModel.screenshareEnabled.observe(this) { enabled ->
binding.screenShare.setOnClickListener {
if (enabled) {
viewModel.setScreenshare(!enabled)
viewModel.stopScreenCapture()
} else {
requestMediaProjection()
}
... ...
package io.livekit.android.sample
import android.app.Application
import android.content.Intent
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.viewModelScope
import com.snakydesign.livedataextensions.distinctUntilChanged
import io.livekit.android.ConnectOptions
import io.livekit.android.LiveKit
import io.livekit.android.events.RoomEvent
import io.livekit.android.events.collect
import io.livekit.android.room.Room
import io.livekit.android.room.participant.Participant
import io.livekit.android.room.participant.RemoteParticipant
import io.livekit.android.room.track.CameraPosition
import io.livekit.android.room.track.LocalVideoTrack
import io.livekit.android.room.track.Track
import io.livekit.android.sample.util.hide
import kotlinx.coroutines.launch
/**
 * ViewModel backing the sample call screen.
 *
 * Connects to a LiveKit room on creation, publishes the local camera/mic tracks,
 * and exposes the room, participant list, active speaker, and media-toggle state
 * as read-only [androidx.lifecycle.LiveData] streams for the UI to observe.
 *
 * @param url LiveKit server URL to connect to.
 * @param token access token authorizing this participant.
 */
class CallViewModel(
    val url: String,
    val token: String,
    application: Application
) : AndroidViewModel(application) {

    // Mutable backing fields are private; the UI only sees the hide()'d read-only views.
    private val mutableRoom = MutableLiveData<Room>()
    val room = mutableRoom.hide()

    private val mutableParticipants = MutableLiveData<List<Participant>>()
    val participants = mutableParticipants.hide()

    private val mutableActiveSpeaker = MutableLiveData<Participant>()
    val activeSpeaker = mutableActiveSpeaker.hide().distinctUntilChanged()

    private val mutableVideoEnabled = MutableLiveData<Boolean>()
    val videoEnabled = mutableVideoEnabled.hide().distinctUntilChanged()

    private val mutableMicEnabled = MutableLiveData<Boolean>()
    val micEnabled = mutableMicEnabled.hide().distinctUntilChanged()

    private val mutableScreenshareEnabled = MutableLiveData<Boolean>()
    val screenshareEnabled = mutableScreenshareEnabled.hide().distinctUntilChanged()

    init {
        viewModelScope.launch {
            // NOTE(review): if the ViewModel is cleared while this coroutine is still
            // connecting, mutableRoom.value is never set and onCleared() cannot
            // disconnect — confirm whether LiveKit.connect cleans up on cancellation.
            val room = LiveKit.connect(
                application,
                url,
                token,
                ConnectOptions(),
                null
            )

            // Forward room events for the lifetime of the ViewModel.
            launch {
                room.events.collect {
                    handleRoomEvent(it)
                }
            }

            // Publish local audio + video and start capturing.
            val localParticipant = room.localParticipant
            val audioTrack = localParticipant.createAudioTrack()
            localParticipant.publishAudioTrack(audioTrack)
            val videoTrack = localParticipant.createVideoTrack()
            localParticipant.publishVideoTrack(videoTrack)
            videoTrack.startCapture()

            updateParticipants(room)
            mutableActiveSpeaker.value = localParticipant
            mutableRoom.value = room

            // Seed toggle state from the published tracks' mute state.
            mutableVideoEnabled.value =
                !(localParticipant.getTrackPublication(Track.Source.CAMERA)?.muted ?: false)
            mutableMicEnabled.value =
                !(localParticipant.getTrackPublication(Track.Source.MICROPHONE)?.muted ?: false)
            mutableScreenshareEnabled.value = false
        }
    }

    // Dispatches room events to the appropriate state-update handler.
    // Events not listed here are intentionally ignored by this sample.
    private fun handleRoomEvent(event: RoomEvent) {
        when (event) {
            is RoomEvent.ParticipantConnected -> updateParticipants(event.room)
            is RoomEvent.ParticipantDisconnected -> updateParticipants(event.room)
            is RoomEvent.ActiveSpeakersChanged -> handleActiveSpeakersChanged(event.speakers)
        }
    }

    // Rebuilds the participant list (local participant first, remotes sorted by sid)
    // and repairs the active speaker if the previous one has left.
    private fun updateParticipants(room: Room) {
        val participantList = listOf(room.localParticipant) +
            room.remoteParticipants
                .keys
                .sortedBy { it }
                .mapNotNull { room.remoteParticipants[it] }
        mutableParticipants.postValue(participantList)

        if (!participantList.contains(mutableActiveSpeaker.value) || mutableActiveSpeaker.value == null) {
            // Active speaker has left (or was never set): fall back to the last
            // participant in the sorted list.
            mutableActiveSpeaker.postValue(participantList.last())
        }
    }

    /**
     * Updates the active speaker from a server speaker-change notification.
     * Keeps the current speaker if they are still active; prefers remote
     * participants so the local user isn't shown as the main speaker.
     */
    fun handleActiveSpeakersChanged(speakers: List<Participant>) {
        // If old active speaker is still active, don't change.
        if (speakers.isEmpty() || speakers.contains(mutableActiveSpeaker.value)) {
            return
        }
        // Try not to display the local participant as speaker; if only local
        // participants are speaking, leave the current speaker unchanged.
        val newSpeaker = speakers.firstOrNull { it is RemoteParticipant } ?: return
        mutableActiveSpeaker.postValue(newSpeaker)
    }

    override fun onCleared() {
        super.onCleared()
        mutableRoom.value?.disconnect()
    }

    /** Enables or disables the local camera track and mirrors the state to the UI. */
    fun setCameraEnabled(enabled: Boolean) {
        val localParticipant = room.value?.localParticipant ?: return
        viewModelScope.launch {
            localParticipant.setCameraEnabled(enabled)
            mutableVideoEnabled.postValue(enabled)
        }
    }

    /** Enables or disables the local microphone track and mirrors the state to the UI. */
    fun setMicEnabled(enabled: Boolean) {
        val localParticipant = room.value?.localParticipant ?: return
        viewModelScope.launch {
            localParticipant.setMicrophoneEnabled(enabled)
            mutableMicEnabled.postValue(enabled)
        }
    }

    /**
     * Starts or stops screen sharing.
     *
     * @param enabled whether screen share should be on.
     * @param mediaProjectionPermissionResultData the MediaProjection permission
     *   result Intent; required when [enabled] is true.
     */
    fun setScreenshare(
        enabled: Boolean,
        mediaProjectionPermissionResultData: Intent? = null
    ) {
        val localParticipant = room.value?.localParticipant ?: return
        viewModelScope.launch {
            localParticipant.setScreenShareEnabled(enabled, mediaProjectionPermissionResultData)
            mutableScreenshareEnabled.postValue(enabled)
        }
    }

    /**
     * Toggles the local camera between front and back positions by restarting
     * the camera track with the flipped [CameraPosition]. No-op when there is
     * no camera track or its position is unknown.
     */
    fun flipCamera() {
        val localParticipant = room.value?.localParticipant ?: return
        val localVideoTrack = localParticipant
            .getTrackPublication(Track.Source.CAMERA)
            ?.track as? LocalVideoTrack
            ?: return

        val currentOptions = localVideoTrack.options
        val newPosition = when (currentOptions.position) {
            CameraPosition.FRONT -> CameraPosition.BACK
            CameraPosition.BACK -> CameraPosition.FRONT
            null -> null
        }

        if (newPosition != null) {
            localVideoTrack.restartTrack(options = currentOptions.copy(position = newPosition))
        }
    }
}
... ... @@ -23,12 +23,8 @@
<io.livekit.android.renderer.TextureViewRenderer
android:id="@+id/speaker_video_view"
android:layout_width="0dp"
android:layout_height="0dp"
app:layout_constraintBottom_toTopOf="@id/audience_row"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
<FrameLayout
... ...