davidliu
Committed by GitHub

Custom audio modes handling (#260)

* Audio types feature

* docs

* cleanup

* audio docs
... ... @@ -15,15 +15,15 @@
- [Docs](#docs)
- [Installation](#installation)
- [Usage](#usage)
  - [Permissions](#permissions)
  - [Publishing camera and microphone](#publishing-camera-and-microphone)
  - [Sharing screen](#sharing-screen)
  - [Rendering subscribed tracks](#rendering-subscribed-tracks)
  - [Audio modes](#audio-modes)
  - [@FlowObservable](#flowobservable)
- [Sample App](#sample-app)
- [Dev Environment](#dev-environment)
  - [Optional (Dev convenience)](#optional-dev-convenience)
## Docs
... ... @@ -67,7 +67,9 @@ subprojects {
### Permissions
LiveKit relies on the `RECORD_AUDIO` and `CAMERA` permissions to use the microphone and camera.
These permissions must be requested at runtime. Reference
the [sample app](https://github.com/livekit/client-sdk-android/blob/4e76e36e0d9f895c718bd41809ab5ff6c57aabd4/sample-app-compose/src/main/java/io/livekit/android/composesample/MainActivity.kt#L134)
for an example.
### Publishing camera and microphone
... ... @@ -174,22 +176,27 @@ for the full implementation.
### Audio modes
WebRTC utilizes an audio module to interface with the device's audio input and output.
By default, the audio is configured for two-way communications.

If you are building a livestreaming or media playback focused app, you can use the preset
`MediaAudioType` when creating the `Room` object for better audio quality.
```kt
val room = LiveKit.create(
    appContext = application,
    overrides = LiveKitOverrides(
        audioOptions = AudioOptions(
            audioOutputType = AudioType.MediaAudioType()
        )
    )
)
```
Note: audio routing becomes automatically handled by the system and cannot be manually controlled.
For more control over the specific audio attributes and modes, a `CustomAudioType` can be
passed instead.
### `@FlowObservable`
Properties marked with `@FlowObservable` can be accessed as a Kotlin Flow to observe changes
... ... @@ -205,11 +212,13 @@ coroutineScope.launch {
## Sample App
**Note**: If you wish to run the sample apps directly from this repo, please consult
the [Dev Environment instructions](#dev-environment).
We have a basic quickstart sample
app [here](https://github.com/livekit/client-sdk-android/blob/main/sample-app-basic), showing how to
connect to a room, publish your device's audio/video, and display the video of one remote
participant.
There are two more full featured video conferencing sample apps:
... ...
... ... @@ -140,7 +140,7 @@ dependencies {
implementation 'org.jetbrains.kotlinx:kotlinx-serialization-json:1.1.0'
api 'io.github.webrtc-sdk:android:104.5112.10'
api "com.squareup.okhttp3:okhttp:4.10.0"
    api 'com.github.davidliu:audioswitch:7b55cec426227a75be25b0d7ad8537d4aede2a2a'
implementation "androidx.annotation:annotation:1.4.0"
implementation "androidx.core:core:${versions.androidx_core}"
implementation "com.google.protobuf:protobuf-javalite:${versions.protobuf}"
... ...
... ... @@ -16,7 +16,11 @@
package io.livekit.android
import android.media.AudioAttributes
import android.media.AudioManager
import io.livekit.android.audio.AudioFocusHandler
import io.livekit.android.audio.AudioHandler
import io.livekit.android.audio.AudioSwitchHandler
import io.livekit.android.audio.NoAudioHandler
import okhttp3.OkHttpClient
import org.webrtc.VideoDecoderFactory
... ... @@ -25,7 +29,7 @@ import org.webrtc.audio.AudioDeviceModule
import org.webrtc.audio.JavaAudioDeviceModule
/**
 * Overrides to replace LiveKit internally used components with custom implementations.
 */
data class LiveKitOverrides(
    /**
     * Override the [OkHttpClient] used by the library.
     */
    val okHttpClient: OkHttpClient? = null,

    /**
     * Override the [VideoEncoderFactory] used by the library.
     */
    val videoEncoderFactory: VideoEncoderFactory? = null,

    /**
     * Override the [VideoDecoderFactory] used by the library.
     */
    val videoDecoderFactory: VideoDecoderFactory? = null,

    /**
     * Options for customizing the audio settings used by the library.
     */
    val audioOptions: AudioOptions? = null,
)
/**
 * Options for customizing how the library handles audio.
 */
class AudioOptions(
    /**
     * Override the default output [AudioType].
     *
     * This affects the audio routing and how the audio is handled. Default is [AudioType.CallAudioType].
     *
     * Note: if [audioHandler] is also passed, the values from [audioOutputType] will not be reflected in it,
     * and must be set manually.
     */
    val audioOutputType: AudioType? = null,

    /**
     * Override the default [AudioHandler].
     *
     * Use [NoAudioHandler] to turn off automatic audio handling.
     */
    val audioHandler: AudioHandler? = null,

    /**
     * Override the default [AudioDeviceModule].
     */
    val audioDeviceModule: AudioDeviceModule? = null,

    /**
     * Called after default setup to allow for customizations on the [JavaAudioDeviceModule].
     *
     * Not used if [audioDeviceModule] is provided.
     */
    val javaAudioDeviceModuleCustomizer: ((builder: JavaAudioDeviceModule.Builder) -> Unit)? = null,
)
/**
 * Describes how the system should treat the app's audio: the [AudioManager] mode,
 * the [AudioAttributes] applied to the output, and the stream type used for
 * audio focus on pre-O devices.
 */
sealed class AudioType(val audioMode: Int, val audioAttributes: AudioAttributes, val audioStreamType: Int) {

    /**
     * An audio type for general media playback usage (i.e. listener-only use cases).
     *
     * Audio routing is handled automatically by the system in normal media mode,
     * and bluetooth microphones may not work on some devices.
     *
     * The default [AudioHandler] for this type is [AudioFocusHandler].
     */
    class MediaAudioType : AudioType(
        AudioManager.MODE_NORMAL,
        AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_MEDIA)
            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
            .build(),
        AudioManager.STREAM_MUSIC
    )

    /**
     * An audio type for calls (i.e. participating in the call or publishing local microphone).
     *
     * Audio routing can be manually controlled.
     *
     * The default [AudioHandler] for this type is [AudioSwitchHandler].
     */
    class CallAudioType : AudioType(
        AudioManager.MODE_IN_COMMUNICATION,
        AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
            .build(),
        AudioManager.STREAM_VOICE_CALL
    )

    /**
     * An audio type that takes in a user-defined [AudioAttributes] and audio stream type.
     *
     * The default [AudioHandler] for this type is [AudioFocusHandler].
     */
    class CustomAudioType(audioMode: Int, audioAttributes: AudioAttributes, audioStreamType: Int) :
        AudioType(audioMode, audioAttributes, audioStreamType)
}
... ...
/*
* Copyright 2023 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.audio
import android.content.Context
import android.media.AudioAttributes
import android.media.AudioFocusRequest
import android.media.AudioManager
import android.os.Build
import androidx.annotation.RequiresApi
import javax.inject.Inject
import javax.inject.Singleton
/**
 * A basic [AudioHandler] that manages audio focus while started.
 *
 * On O+ devices, focus is requested via [AudioFocusRequest]; on older devices,
 * the deprecated stream-type based request is used. The configurable properties
 * must be set before [start] is called to take effect.
 */
@Singleton
open class AudioFocusHandler
@Inject
constructor(context: Context) : AudioHandler {

    /**
     * The audio focus mode to use while started.
     *
     * Defaults to [AudioManager.AUDIOFOCUS_GAIN].
     */
    var focusMode: Int = AudioManager.AUDIOFOCUS_GAIN

    /**
     * The audio stream type to use when requesting audio focus on pre-O devices.
     *
     * Defaults to [AudioManager.STREAM_MUSIC].
     *
     * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility)
     * to ensure that your values match between android versions.
     */
    var audioStreamType: Int = AudioManager.STREAM_MUSIC

    /**
     * The audio attribute usage type to use when requesting audio focus on devices O and beyond.
     *
     * Defaults to [AudioAttributes.USAGE_MEDIA].
     *
     * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility)
     * to ensure that your values match between android versions.
     */
    var audioAttributeUsageType: Int = AudioAttributes.USAGE_MEDIA

    /**
     * The audio attribute content type to use when requesting audio focus on devices O and beyond.
     *
     * Defaults to [AudioAttributes.CONTENT_TYPE_SPEECH].
     *
     * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility)
     * to ensure that your values match between android versions.
     */
    var audioAttributeContentType: Int = AudioAttributes.CONTENT_TYPE_SPEECH

    private val audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager
    private var audioRequest: AudioFocusRequest? = null

    // Internal listener registered with the system; forwards change events to the
    // user-settable listener so it can be swapped without re-requesting focus.
    private val audioFocusListener = AudioManager.OnAudioFocusChangeListener {
        onAudioFocusChangeListener?.onAudioFocusChange(it)
    }

    /** Optional listener notified of audio focus changes while started. */
    var onAudioFocusChangeListener: AudioManager.OnAudioFocusChangeListener? = null

    override fun start() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            audioRequest = createAudioRequest()
            audioRequest?.let { audioManager.requestAudioFocus(it) }
        } else {
            @Suppress("DEPRECATION")
            audioManager.requestAudioFocus(audioFocusListener, audioStreamType, focusMode)
        }
    }

    override fun stop() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            audioRequest?.let { audioManager.abandonAudioFocusRequest(it) }
            audioRequest = null
        } else {
            // Fix: abandon with the same listener instance that start() registered.
            // Previously this passed onAudioFocusChangeListener, which was never the
            // registered listener (and may be null), so focus was never released.
            @Suppress("DEPRECATION")
            audioManager.abandonAudioFocus(audioFocusListener)
        }
    }

    /**
     * Builds the [AudioFocusRequest] used on O+ devices from the configured
     * focus mode, usage type, and content type. Open for subclass customization.
     */
    @RequiresApi(Build.VERSION_CODES.O)
    open fun createAudioRequest(): AudioFocusRequest {
        return AudioFocusRequest.Builder(focusMode)
            .setOnAudioFocusChangeListener(audioFocusListener)
            .setAudioAttributes(
                AudioAttributes.Builder()
                    .setUsage(audioAttributeUsageType)
                    .setContentType(audioAttributeContentType)
                    .build()
            )
            .build()
    }
}
\ No newline at end of file
... ...
... ... @@ -17,7 +17,7 @@
package io.livekit.android.audio
/**
 * Interface for handling android audio.
 */
interface AudioHandler {
/**
... ...
... ... @@ -17,6 +17,7 @@
package io.livekit.android.audio
import android.content.Context
import android.media.AudioAttributes
import android.media.AudioManager
import android.os.Build
import android.os.Handler
... ... @@ -65,11 +66,20 @@ constructor(private val context: Context) : AudioHandler {
var preferredDeviceList: List<Class<out AudioDevice>>? = null
/**
* The audio mode to use while started.
* When true, AudioSwitchHandler will request audio focus on start and abandon on stop.
*
* Defaults to [AudioManager.MODE_NORMAL].
* Defaults to true.
*/
var audioMode: Int = AudioManager.MODE_NORMAL
var manageAudioFocus = true
/**
* The audio mode to use when requesting audio focus.
*
* Defaults to [AudioManager.MODE_IN_COMMUNICATION].
*
* Note: Manual audio routing may not work appropriately when using non-default values.
*/
var audioMode: Int = AudioManager.MODE_IN_COMMUNICATION
/**
* The audio focus mode to use while started.
... ... @@ -78,6 +88,43 @@ constructor(private val context: Context) : AudioHandler {
*/
var focusMode: Int = AudioManager.AUDIOFOCUS_GAIN
/**
* The audio stream type to use when requesting audio focus on pre-O devices.
*
* Defaults to [AudioManager.STREAM_VOICE_CALL].
*
* Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility)
* to ensure that your values match between android versions.
*
* Note: Manual audio routing may not work appropriately when using non-default values.
*/
var audioStreamType: Int = AudioManager.STREAM_VOICE_CALL
/**
* The audio attribute usage type to use when requesting audio focus on devices O and beyond.
*
* Defaults to [AudioAttributes.USAGE_VOICE_COMMUNICATION].
*
* Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility)
* to ensure that your values match between android versions.
*
* Note: Manual audio routing may not work appropriately when using non-default values.
*/
var audioAttributeUsageType: Int = AudioAttributes.USAGE_VOICE_COMMUNICATION
/**
* The audio attribute content type to use when requesting audio focus on devices O and beyond.
*
* Defaults to [AudioAttributes.CONTENT_TYPE_SPEECH].
*
* Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility)
* to ensure that your values match between android versions.
*
* Note: Manual audio routing may not work appropriately when using non-default values.
*/
var audioAttributeContentType: Int = AudioAttributes.CONTENT_TYPE_SPEECH
private var audioSwitch: AbstractAudioSwitch? = null
// AudioSwitch is not threadsafe, so all calls should be done on the main thread.
... ... @@ -103,8 +150,13 @@ constructor(private val context: Context) : AudioHandler {
preferredDeviceList = preferredDeviceList ?: defaultPreferredDeviceList
)
}
switch.manageAudioFocus = manageAudioFocus
switch.audioMode = audioMode
switch.focusMode = focusMode
switch.audioStreamType = audioStreamType
switch.audioAttributeUsageType = audioAttributeUsageType
switch.audioAttributeContentType = audioAttributeContentType
audioSwitch = switch
switch.start(audioDeviceChangeListener ?: defaultAudioDeviceChangeListener)
switch.activate()
... ...
... ... @@ -16,23 +16,58 @@
package io.livekit.android.dagger
import androidx.annotation.Nullable
import android.media.AudioAttributes
import dagger.Module
import dagger.Provides
import io.livekit.android.AudioType
import io.livekit.android.audio.AudioFocusHandler
import io.livekit.android.audio.AudioHandler
import io.livekit.android.audio.AudioSwitchHandler
import javax.inject.Named
import javax.inject.Provider
import javax.inject.Singleton
@Module
object AudioHandlerModule {

    /**
     * Provides the effective [AudioType], falling back to [AudioType.CallAudioType]
     * when no override is given.
     */
    @Provides
    fun audioOutputType(
        @Named(InjectionNames.OVERRIDE_AUDIO_OUTPUT_TYPE)
        audioOutputOverride: AudioType?,
    ): AudioType {
        return audioOutputOverride ?: AudioType.CallAudioType()
    }

    /** Provides the [AudioAttributes] associated with the effective [AudioType]. */
    @Provides
    fun audioOutputAttributes(
        audioType: AudioType,
    ): AudioAttributes {
        return audioType.audioAttributes
    }

    /**
     * Provides the [AudioHandler]: an explicit override wins; otherwise a default
     * handler is chosen based on the [AudioType] and configured from its values.
     */
    @Provides
    @Singleton
    fun audioHandler(
        audioSwitchHandler: Provider<AudioSwitchHandler>,
        audioFocusHandler: Provider<AudioFocusHandler>,
        @Named(InjectionNames.OVERRIDE_AUDIO_HANDLER)
        @Nullable
        audioHandlerOverride: AudioHandler?,
        audioOutputType: AudioType,
    ): AudioHandler {
        return audioHandlerOverride ?: when (audioOutputType) {
            is AudioType.CallAudioType -> {
                audioSwitchHandler.get().apply {
                    audioMode = audioOutputType.audioMode
                    audioAttributeContentType = audioOutputType.audioAttributes.contentType
                    audioAttributeUsageType = audioOutputType.audioAttributes.usage
                    audioStreamType = audioOutputType.audioStreamType
                }
            }
            is AudioType.MediaAudioType,
            is AudioType.CustomAudioType -> {
                // Fix: configure the focus handler from the audio type so that
                // CustomAudioType's user-supplied attributes and stream type are
                // honored instead of silently using the handler defaults.
                audioFocusHandler.get().apply {
                    audioStreamType = audioOutputType.audioStreamType
                    audioAttributeUsageType = audioOutputType.audioAttributes.usage
                    audioAttributeContentType = audioOutputType.audioAttributes.contentType
                }
            }
        }
    }
}
\ No newline at end of file
... ...
... ... @@ -49,4 +49,5 @@ object InjectionNames {
internal const val OVERRIDE_VIDEO_ENCODER_FACTORY = "override_video_encoder_factory"
internal const val OVERRIDE_VIDEO_DECODER_FACTORY = "override_video_decoder_factory"
internal const val OVERRIDE_AUDIO_HANDLER = "override_audio_handler"
internal const val OVERRIDE_AUDIO_OUTPUT_TYPE = "override_audio_output_type"
}
\ No newline at end of file
... ...
... ... @@ -16,12 +16,14 @@
package io.livekit.android.dagger
import android.annotation.SuppressLint
import androidx.annotation.Nullable
import dagger.Module
import dagger.Provides
import io.livekit.android.LiveKitOverrides
import javax.inject.Named
@SuppressLint("KotlinNullnessAnnotation")
@Module
class OverridesModule(private val overrides: LiveKitOverrides) {
... ... @@ -33,12 +35,12 @@ class OverridesModule(private val overrides: LiveKitOverrides) {
@Provides
@Named(InjectionNames.OVERRIDE_AUDIO_DEVICE_MODULE)
@Nullable
fun audioDeviceModule() = overrides.audioDeviceModule
fun audioDeviceModule() = overrides.audioOptions?.audioDeviceModule
@Provides
@Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER)
@Nullable
fun javaAudioDeviceModuleCustomizer() = overrides.javaAudioDeviceModuleCustomizer
fun javaAudioDeviceModuleCustomizer() = overrides.audioOptions?.javaAudioDeviceModuleCustomizer
@Provides
@Named(InjectionNames.OVERRIDE_VIDEO_ENCODER_FACTORY)
... ... @@ -53,6 +55,10 @@ class OverridesModule(private val overrides: LiveKitOverrides) {
@Provides
@Named(InjectionNames.OVERRIDE_AUDIO_HANDLER)
@Nullable
fun audioHandler() = overrides.audioHandler
fun audioHandler() = overrides.audioOptions?.audioHandler
@Provides
@Named(InjectionNames.OVERRIDE_AUDIO_OUTPUT_TYPE)
fun audioOutputType() = overrides.audioOptions?.audioOutputType
}
... ...
... ... @@ -17,6 +17,7 @@
package io.livekit.android.dagger
import android.content.Context
import android.media.AudioAttributes
import android.media.MediaRecorder
import android.os.Build
import androidx.annotation.Nullable
... ... @@ -48,6 +49,7 @@ object RTCModule {
@Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER)
@Nullable
moduleCustomizer: ((builder: JavaAudioDeviceModule.Builder) -> Unit)?,
audioOutputAttributes: AudioAttributes,
appContext: Context
): AudioDeviceModule {
if (audioDeviceModuleOverride != null) {
... ... @@ -113,6 +115,7 @@ object RTCModule {
}
val useHardwareAudioProcessing = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q
val builder = JavaAudioDeviceModule.builder(appContext)
.setUseHardwareAcousticEchoCanceler(useHardwareAudioProcessing)
.setUseHardwareNoiseSuppressor(useHardwareAudioProcessing)
... ... @@ -121,6 +124,7 @@ object RTCModule {
.setAudioRecordStateCallback(audioRecordStateCallback)
.setAudioTrackStateCallback(audioTrackStateCallback)
.setAudioSource(MediaRecorder.AudioSource.DEFAULT)
.setAudioAttributes(audioOutputAttributes)
moduleCustomizer?.invoke(builder)
return builder.createAudioDeviceModule()
... ...
... ... @@ -10,7 +10,6 @@ import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.viewModelScope
import com.github.ajalt.timberkt.Timber
import io.livekit.android.LiveKit
import io.livekit.android.LiveKitOverrides
import io.livekit.android.RoomOptions
import io.livekit.android.audio.AudioSwitchHandler
import io.livekit.android.e2ee.E2EEOptions
... ... @@ -42,7 +41,6 @@ class CallViewModel(
val e2ee: Boolean = false,
val e2eeKey: String? = "",
) : AndroidViewModel(application) {
val audioHandler = AudioSwitchHandler(application)
private fun getE2EEOptions(): E2EEOptions? {
var e2eeOptions: E2EEOptions? = null
... ... @@ -53,16 +51,13 @@ class CallViewModel(
return e2eeOptions
}
val room = LiveKit.create(
appContext = application,
options = RoomOptions(adaptiveStream = true, dynacast = true),
overrides = LiveKitOverrides(
audioHandler = audioHandler
)
)
val audioHandler = room.audioHandler as AudioSwitchHandler
val participants = room::remoteParticipants.flow
.map { remoteParticipants ->
listOf<Participant>(room.localParticipant) +
... ...
... ... @@ -15,6 +15,7 @@ import androidx.compose.runtime.livedata.observeAsState
import androidx.compose.ui.Modifier
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.lifecycleScope
import io.livekit.android.AudioOptions
import io.livekit.android.LiveKit
import io.livekit.android.LiveKitOverrides
import io.livekit.android.room.Room
... ... @@ -26,7 +27,7 @@ import kotlinx.coroutines.launch
import org.webrtc.EglBase
import java.io.File
import java.io.IOException
import java.util.*
import java.util.Date
class MainActivity : ComponentActivity() {
lateinit var room: Room
... ... @@ -40,12 +41,14 @@ class MainActivity : ComponentActivity() {
room = LiveKit.create(
appContext = applicationContext,
overrides = LiveKitOverrides(
javaAudioDeviceModuleCustomizer = { builder ->
// Receive audio samples
builder.setSamplesReadyCallback { samples ->
videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples)
audioOptions = AudioOptions(
javaAudioDeviceModuleCustomizer = { builder ->
// Receive audio samples
builder.setSamplesReadyCallback { samples ->
videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples)
}
}
}
),
)
)
... ...