Committed by GitHub
Custom audio modes handling (#260)
* Audio types feature
* docs
* cleanup
* audio docs
Showing 12 changed files with 334 additions and 56 deletions
| @@ -15,15 +15,15 @@ | @@ -15,15 +15,15 @@ | ||
| 15 | - [Docs](#docs) | 15 | - [Docs](#docs) |
| 16 | - [Installation](#installation) | 16 | - [Installation](#installation) |
| 17 | - [Usage](#usage) | 17 | - [Usage](#usage) |
| 18 | - - [Permissions](#permissions) | ||
| 19 | - - [Publishing camera and microphone](#publishing-camera-and-microphone) | ||
| 20 | - - [Sharing screen](#sharing-screen) | ||
| 21 | - - [Rendering subscribed tracks](#rendering-subscribed-tracks) | ||
| 22 | - - [Audio modes](#audio-modes) | ||
| 23 | - - [@FlowObservable](#flowobservable) | 18 | + - [Permissions](#permissions) |
| 19 | + - [Publishing camera and microphone](#publishing-camera-and-microphone) | ||
| 20 | + - [Sharing screen](#sharing-screen) | ||
| 21 | + - [Rendering subscribed tracks](#rendering-subscribed-tracks) | ||
| 22 | + - [Audio modes](#audio-modes) | ||
| 23 | + - [@FlowObservable](#flowobservable) | ||
| 24 | - [Sample App](#sample-app) | 24 | - [Sample App](#sample-app) |
| 25 | - [Dev Environment](#dev-environment) | 25 | - [Dev Environment](#dev-environment) |
| 26 | - - [Optional (Dev convenience)](#optional-dev-convenience) | 26 | + - [Optional (Dev convenience)](#optional-dev-convenience) |
| 27 | 27 | ||
| 28 | ## Docs | 28 | ## Docs |
| 29 | 29 | ||
| @@ -67,7 +67,9 @@ subprojects { | @@ -67,7 +67,9 @@ subprojects { | ||
| 67 | ### Permissions | 67 | ### Permissions |
| 68 | 68 | ||
| 69 | LiveKit relies on the `RECORD_AUDIO` and `CAMERA` permissions to use the microphone and camera. | 69 | LiveKit relies on the `RECORD_AUDIO` and `CAMERA` permissions to use the microphone and camera. |
| 70 | -These permissions must be requested at runtime. Reference the [sample app](https://github.com/livekit/client-sdk-android/blob/4e76e36e0d9f895c718bd41809ab5ff6c57aabd4/sample-app-compose/src/main/java/io/livekit/android/composesample/MainActivity.kt#L134) for an example. | 70 | +These permissions must be requested at runtime. Reference |
| 71 | +the [sample app](https://github.com/livekit/client-sdk-android/blob/4e76e36e0d9f895c718bd41809ab5ff6c57aabd4/sample-app-compose/src/main/java/io/livekit/android/composesample/MainActivity.kt#L134) | ||
| 72 | +for an example. | ||
| 71 | 73 | ||
| 72 | ### Publishing camera and microphone | 74 | ### Publishing camera and microphone |
| 73 | 75 | ||
| @@ -174,22 +176,27 @@ for the full implementation. | @@ -174,22 +176,27 @@ for the full implementation. | ||
| 174 | 176 | ||
| 175 | ### Audio modes | 177 | ### Audio modes |
| 176 | 178 | ||
| 177 | -WebRTC utilizes an audio module to interface with the device's audio input and output. By default, the audio module is configured for two-way communications. | 179 | +By default, the audio is configured for two-way communications. |
| 178 | 180 | ||
| 179 | -If you are building a livestreaming or music app, you can make the following tweaks to improve playback quality: | 181 | +If you are building a livestreaming or media playback focused app, you can use the preset |
| 182 | +`MediaAudioType` when creating the `Room` object for better audio quality. | ||
| 180 | 183 | ||
| 181 | ```kt | 184 | ```kt |
| 182 | -WebRTCModuleOptions options = WebRTCModuleOptions.getInstance(); | ||
| 183 | -AudioDeviceModule adm = JavaAudioDeviceModule.builder(this) | ||
| 184 | - .setAudioAttributes(AudioAttributes.Builder() | ||
| 185 | - .setUsage(AudioAttributes.USAGE_MEDIA) | ||
| 186 | - .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC) | ||
| 187 | - .build()) | ||
| 188 | - .setUseStereoOutput(true) | ||
| 189 | - .build(); | ||
| 190 | -options.audioDeviceModule = adm; | 185 | +val room = LiveKit.create( |
| 186 | + appContext = application, | ||
| 187 | + overrides = LiveKitOverrides( | ||
| 188 | + audioOptions = AudioOptions( | ||
| 189 | + audioOutputType = AudioType.MediaAudioType() | ||
| 190 | + ) | ||
| 191 | + ) | ||
| 192 | +) | ||
| 191 | ``` | 193 | ``` |
| 192 | 194 | ||
| 195 | +Note: audio routing is automatically handled by the system and cannot be manually controlled. | ||
| 196 | + | ||
| 197 | +For more control over the specific audio attributes and modes, a `CustomAudioType` can be | ||
| 198 | +passed instead. | ||
| 199 | + | ||
| 193 | ### `@FlowObservable` | 200 | ### `@FlowObservable` |
| 194 | 201 | ||
| 195 | Properties marked with `@FlowObservable` can be accessed as a Kotlin Flow to observe changes | 202 | Properties marked with `@FlowObservable` can be accessed as a Kotlin Flow to observe changes |
| @@ -205,11 +212,13 @@ coroutineScope.launch { | @@ -205,11 +212,13 @@ coroutineScope.launch { | ||
| 205 | 212 | ||
| 206 | ## Sample App | 213 | ## Sample App |
| 207 | 214 | ||
| 208 | -**Note**: If you wish to run the sample apps directly from this repo, please consult the [Dev Environment instructions](#dev-environment). | 215 | +**Note**: If you wish to run the sample apps directly from this repo, please consult |
| 216 | +the [Dev Environment instructions](#dev-environment). | ||
| 209 | 217 | ||
| 210 | We have a basic quickstart sample | 218 | We have a basic quickstart sample |
| 211 | app [here](https://github.com/livekit/client-sdk-android/blob/main/sample-app-basic), showing how to | 219 | app [here](https://github.com/livekit/client-sdk-android/blob/main/sample-app-basic), showing how to |
| 212 | -connect to a room, publish your device's audio/video, and display the video of one remote participant. | 220 | +connect to a room, publish your device's audio/video, and display the video of one remote |
| 221 | +participant. | ||
| 213 | 222 | ||
| 214 | There are two more full featured video conferencing sample apps: | 223 | There are two more full featured video conferencing sample apps: |
| 215 | 224 |
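
The README change above mentions `CustomAudioType` without showing it in use. Below is a minimal sketch of how it could be passed, based on the `AudioOptions` and `AudioType.CustomAudioType` definitions added in `LiveKitOverrides.kt` later in this diff; the specific mode, attributes, and stream type are illustrative values, not recommendations, and `application` is assumed to be an Application context as in the README's own example.

```kt
import android.media.AudioAttributes
import android.media.AudioManager
import io.livekit.android.AudioOptions
import io.livekit.android.AudioType
import io.livekit.android.LiveKit
import io.livekit.android.LiveKitOverrides

// Sketch only: a music-oriented profile supplied through CustomAudioType.
val room = LiveKit.create(
    appContext = application,
    overrides = LiveKitOverrides(
        audioOptions = AudioOptions(
            audioOutputType = AudioType.CustomAudioType(
                audioMode = AudioManager.MODE_NORMAL,
                audioAttributes = AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_MEDIA)
                    .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                    .build(),
                audioStreamType = AudioManager.STREAM_MUSIC
            )
        )
    )
)
```
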
| @@ -140,7 +140,7 @@ dependencies { | @@ -140,7 +140,7 @@ dependencies { | ||
| 140 | implementation 'org.jetbrains.kotlinx:kotlinx-serialization-json:1.1.0' | 140 | implementation 'org.jetbrains.kotlinx:kotlinx-serialization-json:1.1.0' |
| 141 | api 'io.github.webrtc-sdk:android:104.5112.10' | 141 | api 'io.github.webrtc-sdk:android:104.5112.10' |
| 142 | api "com.squareup.okhttp3:okhttp:4.10.0" | 142 | api "com.squareup.okhttp3:okhttp:4.10.0" |
| 143 | - api 'com.github.davidliu:audioswitch:1689af118f69dcd8c8dc95e5a711dd0a7a626e69' | 143 | + api 'com.github.davidliu:audioswitch:7b55cec426227a75be25b0d7ad8537d4aede2a2a' |
| 144 | implementation "androidx.annotation:annotation:1.4.0" | 144 | implementation "androidx.annotation:annotation:1.4.0" |
| 145 | implementation "androidx.core:core:${versions.androidx_core}" | 145 | implementation "androidx.core:core:${versions.androidx_core}" |
| 146 | implementation "com.google.protobuf:protobuf-javalite:${versions.protobuf}" | 146 | implementation "com.google.protobuf:protobuf-javalite:${versions.protobuf}" |
| @@ -16,7 +16,11 @@ | @@ -16,7 +16,11 @@ | ||
| 16 | 16 | ||
| 17 | package io.livekit.android | 17 | package io.livekit.android |
| 18 | 18 | ||
| 19 | +import android.media.AudioAttributes | ||
| 20 | +import android.media.AudioManager | ||
| 21 | +import io.livekit.android.audio.AudioFocusHandler | ||
| 19 | import io.livekit.android.audio.AudioHandler | 22 | import io.livekit.android.audio.AudioHandler |
| 23 | +import io.livekit.android.audio.AudioSwitchHandler | ||
| 20 | import io.livekit.android.audio.NoAudioHandler | 24 | import io.livekit.android.audio.NoAudioHandler |
| 21 | import okhttp3.OkHttpClient | 25 | import okhttp3.OkHttpClient |
| 22 | import org.webrtc.VideoDecoderFactory | 26 | import org.webrtc.VideoDecoderFactory |
| @@ -25,7 +29,7 @@ import org.webrtc.audio.AudioDeviceModule | @@ -25,7 +29,7 @@ import org.webrtc.audio.AudioDeviceModule | ||
| 25 | import org.webrtc.audio.JavaAudioDeviceModule | 29 | import org.webrtc.audio.JavaAudioDeviceModule |
| 26 | 30 | ||
| 27 | /** | 31 | /** |
| 28 | - * Overrides to replace LiveKit internally used component with custom implementations. | 32 | + * Overrides to replace LiveKit internally used components with custom implementations. |
| 29 | */ | 33 | */ |
| 30 | data class LiveKitOverrides( | 34 | data class LiveKitOverrides( |
| 31 | /** | 35 | /** |
| @@ -34,6 +38,37 @@ data class LiveKitOverrides( | @@ -34,6 +38,37 @@ data class LiveKitOverrides( | ||
| 34 | val okHttpClient: OkHttpClient? = null, | 38 | val okHttpClient: OkHttpClient? = null, |
| 35 | 39 | ||
| 36 | /** | 40 | /** |
| 41 | + * Override the [VideoEncoderFactory] used by the library. | ||
| 42 | + */ | ||
| 43 | + val videoEncoderFactory: VideoEncoderFactory? = null, | ||
| 44 | + | ||
| 45 | + /** | ||
| 46 | + * Override the [VideoDecoderFactory] used by the library. | ||
| 47 | + */ | ||
| 48 | + val videoDecoderFactory: VideoDecoderFactory? = null, | ||
| 49 | + | ||
| 50 | + val audioOptions: AudioOptions? = null, | ||
| 51 | +) | ||
| 52 | + | ||
| 53 | + | ||
| 54 | +class AudioOptions( | ||
| 55 | + /** | ||
| 56 | + * Override the default output [AudioType]. | ||
| 57 | + * | ||
| 58 | + * This affects the audio routing and how the audio is handled. Default is [AudioType.CallAudioType]. | ||
| 59 | + * | ||
| 60 | + * Note: if [audioHandler] is also passed, the values from [audioOutputType] will not be reflected in it, | ||
| 61 | + * and must be set manually. | ||
| 62 | + */ | ||
| 63 | + val audioOutputType: AudioType? = null, | ||
| 64 | + /** | ||
| 65 | + * Override the default [AudioHandler]. | ||
| 66 | + * | ||
| 67 | + * Use [NoAudioHandler] to turn off automatic audio handling. | ||
| 68 | + */ | ||
| 69 | + val audioHandler: AudioHandler? = null, | ||
| 70 | + | ||
| 71 | + /** | ||
| 37 | * Override the default [AudioDeviceModule]. | 72 | * Override the default [AudioDeviceModule]. |
| 38 | */ | 73 | */ |
| 39 | val audioDeviceModule: AudioDeviceModule? = null, | 74 | val audioDeviceModule: AudioDeviceModule? = null, |
| @@ -44,22 +79,47 @@ data class LiveKitOverrides( | @@ -44,22 +79,47 @@ data class LiveKitOverrides( | ||
| 44 | * Not used if [audioDeviceModule] is provided. | 79 | * Not used if [audioDeviceModule] is provided. |
| 45 | */ | 80 | */ |
| 46 | val javaAudioDeviceModuleCustomizer: ((builder: JavaAudioDeviceModule.Builder) -> Unit)? = null, | 81 | val javaAudioDeviceModuleCustomizer: ((builder: JavaAudioDeviceModule.Builder) -> Unit)? = null, |
| 82 | +) | ||
| 47 | 83 | ||
| 84 | +sealed class AudioType(val audioMode: Int, val audioAttributes: AudioAttributes, val audioStreamType: Int) { | ||
| 48 | /** | 85 | /** |
| 49 | - * Override the [VideoEncoderFactory] used by the library. | 86 | + * An audio type for general media playback usage (i.e. listener-only use cases). |
| 87 | + * | ||
| 88 | + * Audio routing is handled automatically by the system in normal media mode, | ||
| 89 | + * and bluetooth microphones may not work on some devices. | ||
| 90 | + * | ||
| 91 | + * The default [AudioHandler] for this type is [AudioFocusHandler]. | ||
| 50 | */ | 92 | */ |
| 51 | - val videoEncoderFactory: VideoEncoderFactory? = null, | 93 | + class MediaAudioType : AudioType( |
| 94 | + AudioManager.MODE_NORMAL, | ||
| 95 | + AudioAttributes.Builder() | ||
| 96 | + .setUsage(AudioAttributes.USAGE_MEDIA) | ||
| 97 | + .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH) | ||
| 98 | + .build(), | ||
| 99 | + AudioManager.STREAM_MUSIC | ||
| 100 | + ) | ||
| 52 | 101 | ||
| 53 | /** | 102 | /** |
| 54 | - * Override the [VideoDecoderFactory] used by the library. | 103 | + * An audio type for calls (i.e. participating in the call or publishing local microphone). |
| 104 | + * | ||
| 105 | + * Audio routing can be manually controlled. | ||
| 106 | + * | ||
| 107 | + * The default [AudioHandler] for this type is [AudioSwitchHandler]. | ||
| 55 | */ | 108 | */ |
| 56 | - val videoDecoderFactory: VideoDecoderFactory? = null, | 109 | + class CallAudioType : AudioType( |
| 110 | + AudioManager.MODE_IN_COMMUNICATION, | ||
| 111 | + AudioAttributes.Builder() | ||
| 112 | + .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION) | ||
| 113 | + .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH) | ||
| 114 | + .build(), | ||
| 115 | + AudioManager.STREAM_VOICE_CALL | ||
| 116 | + ) | ||
| 57 | 117 | ||
| 58 | /** | 118 | /** |
| 59 | - * Override the default [AudioHandler]. | 119 | + * An audio type that takes in a user-defined [AudioAttributes] and audio stream type. |
| 60 | * | 120 | * |
| 61 | - * Use [NoAudioHandler] to turn off automatic audio handling. | 121 | + * The default [AudioHandler] for this type is [AudioFocusHandler]. |
| 62 | */ | 122 | */ |
| 63 | - | ||
| 64 | - val audioHandler: AudioHandler? = null | ||
| 65 | -) | ||
| 123 | + class CustomAudioType(audioMode: Int, audioAttributes: AudioAttributes, audioStreamType: Int) : | ||
| 124 | + AudioType(audioMode, audioAttributes, audioStreamType) | ||
| 125 | +} |
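
The `AudioOptions` KDoc above notes that when an `audioHandler` is supplied explicitly, the values from `audioOutputType` are not applied to it and must be set manually. A hedged sketch of doing so, assuming an Application context named `appContext`; property and getter names are taken from the classes in this diff, and note the handler's own caveat that manual routing may not work well with non-default values.

```kt
import android.content.Context
import io.livekit.android.AudioOptions
import io.livekit.android.AudioType
import io.livekit.android.LiveKitOverrides
import io.livekit.android.audio.AudioSwitchHandler

fun mediaOverrides(appContext: Context): LiveKitOverrides {
    val mediaType = AudioType.MediaAudioType()
    // Because a custom handler is passed, the AudioType values must be copied over by hand.
    val handler = AudioSwitchHandler(appContext).apply {
        audioMode = mediaType.audioMode
        audioStreamType = mediaType.audioStreamType
        audioAttributeUsageType = mediaType.audioAttributes.usage
        audioAttributeContentType = mediaType.audioAttributes.contentType
    }
    return LiveKitOverrides(
        audioOptions = AudioOptions(
            audioOutputType = mediaType,
            audioHandler = handler
        )
    )
}
```
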
| 1 | +/* | ||
| 2 | + * Copyright 2023 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.audio | ||
| 18 | + | ||
| 19 | +import android.content.Context | ||
| 20 | +import android.media.AudioAttributes | ||
| 21 | +import android.media.AudioFocusRequest | ||
| 22 | +import android.media.AudioManager | ||
| 23 | +import android.os.Build | ||
| 24 | +import androidx.annotation.RequiresApi | ||
| 25 | +import javax.inject.Inject | ||
| 26 | +import javax.inject.Singleton | ||
| 27 | + | ||
| 28 | +/** | ||
| 29 | + * A basic [AudioHandler] that manages audio focus while started. | ||
| 30 | + */ | ||
| 31 | +@Singleton | ||
| 32 | +open class AudioFocusHandler | ||
| 33 | +@Inject | ||
| 34 | +constructor(context: Context) : AudioHandler { | ||
| 35 | + | ||
| 36 | + /** | ||
| 37 | + * The audio focus mode to use while started. | ||
| 38 | + * | ||
| 39 | + * Defaults to [AudioManager.AUDIOFOCUS_GAIN]. | ||
| 40 | + */ | ||
| 41 | + var focusMode: Int = AudioManager.AUDIOFOCUS_GAIN | ||
| 42 | + | ||
| 43 | + /** | ||
| 44 | + * The audio stream type to use when requesting audio focus on pre-O devices. | ||
| 45 | + * | ||
| 46 | + * Defaults to [AudioManager.STREAM_MUSIC]. | ||
| 47 | + * | ||
| 48 | + * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility) | ||
| 49 | + * to ensure that your values match between android versions. | ||
| 50 | + */ | ||
| 51 | + var audioStreamType: Int = AudioManager.STREAM_MUSIC | ||
| 52 | + | ||
| 53 | + /** | ||
| 54 | + * The audio attribute usage type to use when requesting audio focus on devices O and beyond. | ||
| 55 | + * | ||
| 56 | + * Defaults to [AudioAttributes.USAGE_MEDIA]. | ||
| 57 | + * | ||
| 58 | + * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility) | ||
| 59 | + * to ensure that your values match between android versions. | ||
| 60 | + */ | ||
| 61 | + var audioAttributeUsageType: Int = AudioAttributes.USAGE_MEDIA | ||
| 62 | + | ||
| 63 | + /** | ||
| 64 | + * The audio attribute content type to use when requesting audio focus on devices O and beyond. | ||
| 65 | + * | ||
| 66 | + * Defaults to [AudioAttributes.CONTENT_TYPE_SPEECH]. | ||
| 67 | + * | ||
| 68 | + * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility) | ||
| 69 | + * to ensure that your values match between android versions. | ||
| 70 | + */ | ||
| 71 | + var audioAttributeContentType: Int = AudioAttributes.CONTENT_TYPE_SPEECH | ||
| 72 | + | ||
| 73 | + private val audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager | ||
| 74 | + private var audioRequest: AudioFocusRequest? = null | ||
| 75 | + private var audioFocusListener = AudioManager.OnAudioFocusChangeListener { | ||
| 76 | + onAudioFocusChangeListener?.onAudioFocusChange(it) | ||
| 77 | + } | ||
| 78 | + | ||
| 79 | + var onAudioFocusChangeListener: AudioManager.OnAudioFocusChangeListener? = null | ||
| 80 | + | ||
| 81 | + override fun start() { | ||
| 82 | + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { | ||
| 83 | + audioRequest = createAudioRequest() | ||
| 84 | + audioRequest?.let { audioManager.requestAudioFocus(it) } | ||
| 85 | + } else { | ||
| 86 | + @Suppress("DEPRECATION") | ||
| 87 | + audioManager.requestAudioFocus(audioFocusListener, audioStreamType, focusMode) | ||
| 88 | + } | ||
| 89 | + } | ||
| 90 | + | ||
| 91 | + override fun stop() { | ||
| 92 | + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { | ||
| 93 | + audioRequest?.let { audioManager.abandonAudioFocusRequest(it) } | ||
| 94 | + audioRequest = null | ||
| 95 | + } else { | ||
| 96 | + @Suppress("DEPRECATION") | ||
| 97 | + audioManager.abandonAudioFocus(onAudioFocusChangeListener) | ||
| 98 | + } | ||
| 99 | + } | ||
| 100 | + | ||
| 101 | + @RequiresApi(Build.VERSION_CODES.O) | ||
| 102 | + open fun createAudioRequest(): AudioFocusRequest { | ||
| 103 | + return AudioFocusRequest.Builder(focusMode) | ||
| 104 | + .setOnAudioFocusChangeListener(audioFocusListener) | ||
| 105 | + .setAudioAttributes( | ||
| 106 | + AudioAttributes.Builder() | ||
| 107 | + .setUsage(audioAttributeUsageType) | ||
| 108 | + .setContentType(audioAttributeContentType) | ||
| 109 | + .build() | ||
| 110 | + ) | ||
| 111 | + .build() | ||
| 112 | + } | ||
| 113 | +} |
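
Since `AudioFocusHandler` is open and exposes its focus parameters as mutable properties, it can also be configured directly and supplied via `AudioOptions`. A sketch assuming an Application context named `appContext`; the ducking focus mode and the reaction to focus loss are illustrative choices only.

```kt
import android.content.Context
import android.media.AudioManager
import io.livekit.android.AudioOptions
import io.livekit.android.LiveKitOverrides
import io.livekit.android.audio.AudioFocusHandler

fun duckingOverrides(appContext: Context): LiveKitOverrides {
    val focusHandler = AudioFocusHandler(appContext).apply {
        // Request transient, duckable focus instead of the default AUDIOFOCUS_GAIN.
        focusMode = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK
        onAudioFocusChangeListener = AudioManager.OnAudioFocusChangeListener { change ->
            if (change == AudioManager.AUDIOFOCUS_LOSS) {
                // React in the app layer, e.g. mute or pause playback.
            }
        }
    }
    return LiveKitOverrides(audioOptions = AudioOptions(audioHandler = focusHandler))
}
```
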
| @@ -17,7 +17,7 @@ | @@ -17,7 +17,7 @@ | ||
| 17 | package io.livekit.android.audio | 17 | package io.livekit.android.audio |
| 18 | 18 | ||
| 19 | /** | 19 | /** |
| 20 | - * Interface for handling android audio routing. | 20 | + * Interface for handling android audio. |
| 21 | */ | 21 | */ |
| 22 | interface AudioHandler { | 22 | interface AudioHandler { |
| 23 | /** | 23 | /** |
| @@ -17,6 +17,7 @@ | @@ -17,6 +17,7 @@ | ||
| 17 | package io.livekit.android.audio | 17 | package io.livekit.android.audio |
| 18 | 18 | ||
| 19 | import android.content.Context | 19 | import android.content.Context |
| 20 | +import android.media.AudioAttributes | ||
| 20 | import android.media.AudioManager | 21 | import android.media.AudioManager |
| 21 | import android.os.Build | 22 | import android.os.Build |
| 22 | import android.os.Handler | 23 | import android.os.Handler |
| @@ -65,11 +66,20 @@ constructor(private val context: Context) : AudioHandler { | @@ -65,11 +66,20 @@ constructor(private val context: Context) : AudioHandler { | ||
| 65 | var preferredDeviceList: List<Class<out AudioDevice>>? = null | 66 | var preferredDeviceList: List<Class<out AudioDevice>>? = null |
| 66 | 67 | ||
| 67 | /** | 68 | /** |
| 68 | - * The audio mode to use while started. | 69 | + * When true, AudioSwitchHandler will request audio focus on start and abandon on stop. |
| 69 | * | 70 | * |
| 70 | - * Defaults to [AudioManager.MODE_NORMAL]. | 71 | + * Defaults to true. |
| 71 | */ | 72 | */ |
| 72 | - var audioMode: Int = AudioManager.MODE_NORMAL | 73 | + var manageAudioFocus = true |
| 74 | + | ||
| 75 | + /** | ||
| 76 | + * The audio mode to use when requesting audio focus. | ||
| 77 | + * | ||
| 78 | + * Defaults to [AudioManager.MODE_IN_COMMUNICATION]. | ||
| 79 | + * | ||
| 80 | + * Note: Manual audio routing may not work appropriately when using non-default values. | ||
| 81 | + */ | ||
| 82 | + var audioMode: Int = AudioManager.MODE_IN_COMMUNICATION | ||
| 73 | 83 | ||
| 74 | /** | 84 | /** |
| 75 | * The audio focus mode to use while started. | 85 | * The audio focus mode to use while started. |
| @@ -78,6 +88,43 @@ constructor(private val context: Context) : AudioHandler { | @@ -78,6 +88,43 @@ constructor(private val context: Context) : AudioHandler { | ||
| 78 | */ | 88 | */ |
| 79 | var focusMode: Int = AudioManager.AUDIOFOCUS_GAIN | 89 | var focusMode: Int = AudioManager.AUDIOFOCUS_GAIN |
| 80 | 90 | ||
| 91 | + /** | ||
| 92 | + * The audio stream type to use when requesting audio focus on pre-O devices. | ||
| 93 | + * | ||
| 94 | + * Defaults to [AudioManager.STREAM_VOICE_CALL]. | ||
| 95 | + * | ||
| 96 | + * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility) | ||
| 97 | + * to ensure that your values match between android versions. | ||
| 98 | + * | ||
| 99 | + * Note: Manual audio routing may not work appropriately when using non-default values. | ||
| 100 | + */ | ||
| 101 | + var audioStreamType: Int = AudioManager.STREAM_VOICE_CALL | ||
| 102 | + | ||
| 103 | + /** | ||
| 104 | + * The audio attribute usage type to use when requesting audio focus on devices O and beyond. | ||
| 105 | + * | ||
| 106 | + * Defaults to [AudioAttributes.USAGE_VOICE_COMMUNICATION]. | ||
| 107 | + * | ||
| 108 | + * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility) | ||
| 109 | + * to ensure that your values match between android versions. | ||
| 110 | + * | ||
| 111 | + * Note: Manual audio routing may not work appropriately when using non-default values. | ||
| 112 | + */ | ||
| 113 | + var audioAttributeUsageType: Int = AudioAttributes.USAGE_VOICE_COMMUNICATION | ||
| 114 | + | ||
| 115 | + /** | ||
| 116 | + * The audio attribute content type to use when requesting audio focus on devices O and beyond. | ||
| 117 | + * | ||
| 118 | + * Defaults to [AudioAttributes.CONTENT_TYPE_SPEECH]. | ||
| 119 | + * | ||
| 120 | + * Refer to this [compatibility table](https://source.android.com/docs/core/audio/attributes#compatibility) | ||
| 121 | + * to ensure that your values match between android versions. | ||
| 122 | + * | ||
| 123 | + * Note: Manual audio routing may not work appropriately when using non-default values. | ||
| 124 | + */ | ||
| 125 | + var audioAttributeContentType: Int = AudioAttributes.CONTENT_TYPE_SPEECH | ||
| 126 | + | ||
| 127 | + | ||
| 81 | private var audioSwitch: AbstractAudioSwitch? = null | 128 | private var audioSwitch: AbstractAudioSwitch? = null |
| 82 | 129 | ||
| 83 | // AudioSwitch is not threadsafe, so all calls should be done on the main thread. | 130 | // AudioSwitch is not threadsafe, so all calls should be done on the main thread. |
| @@ -103,8 +150,13 @@ constructor(private val context: Context) : AudioHandler { | @@ -103,8 +150,13 @@ constructor(private val context: Context) : AudioHandler { | ||
| 103 | preferredDeviceList = preferredDeviceList ?: defaultPreferredDeviceList | 150 | preferredDeviceList = preferredDeviceList ?: defaultPreferredDeviceList |
| 104 | ) | 151 | ) |
| 105 | } | 152 | } |
| 153 | + switch.manageAudioFocus = manageAudioFocus | ||
| 106 | switch.audioMode = audioMode | 154 | switch.audioMode = audioMode |
| 107 | switch.focusMode = focusMode | 155 | switch.focusMode = focusMode |
| 156 | + switch.audioStreamType = audioStreamType | ||
| 157 | + switch.audioAttributeUsageType = audioAttributeUsageType | ||
| 158 | + switch.audioAttributeContentType = audioAttributeContentType | ||
| 159 | + | ||
| 108 | audioSwitch = switch | 160 | audioSwitch = switch |
| 109 | switch.start(audioDeviceChangeListener ?: defaultAudioDeviceChangeListener) | 161 | switch.start(audioDeviceChangeListener ?: defaultAudioDeviceChangeListener) |
| 110 | switch.activate() | 162 | switch.activate() |
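
The new `AudioSwitchHandler` properties above (`manageAudioFocus`, stream type, and attribute types) are copied onto the underlying `AudioSwitch` when the handler starts, so any customization presumably needs to happen before connecting. A sketch, building on the `room.audioHandler` cast used in the sample app change later in this diff:

```kt
import io.livekit.android.audio.AudioSwitchHandler

// With the default CallAudioType, room.audioHandler is an AudioSwitchHandler.
// Configure it before connecting so the values are applied when the handler starts.
val switchHandler = room.audioHandler as AudioSwitchHandler
switchHandler.manageAudioFocus = false // the app takes ownership of audio focus itself
```
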
| @@ -16,23 +16,58 @@ | @@ -16,23 +16,58 @@ | ||
| 16 | 16 | ||
| 17 | package io.livekit.android.dagger | 17 | package io.livekit.android.dagger |
| 18 | 18 | ||
| 19 | -import androidx.annotation.Nullable | 19 | +import android.media.AudioAttributes |
| 20 | import dagger.Module | 20 | import dagger.Module |
| 21 | import dagger.Provides | 21 | import dagger.Provides |
| 22 | +import io.livekit.android.AudioType | ||
| 23 | +import io.livekit.android.audio.AudioFocusHandler | ||
| 22 | import io.livekit.android.audio.AudioHandler | 24 | import io.livekit.android.audio.AudioHandler |
| 23 | import io.livekit.android.audio.AudioSwitchHandler | 25 | import io.livekit.android.audio.AudioSwitchHandler |
| 24 | import javax.inject.Named | 26 | import javax.inject.Named |
| 25 | import javax.inject.Provider | 27 | import javax.inject.Provider |
| 28 | +import javax.inject.Singleton | ||
| 26 | 29 | ||
| 27 | @Module | 30 | @Module |
| 28 | object AudioHandlerModule { | 31 | object AudioHandlerModule { |
| 32 | + | ||
| 33 | + @Provides | ||
| 34 | + fun audioOutputType( | ||
| 35 | + @Named(InjectionNames.OVERRIDE_AUDIO_OUTPUT_TYPE) | ||
| 36 | + audioOutputOverride: AudioType?, | ||
| 37 | + ): AudioType { | ||
| 38 | + return audioOutputOverride ?: AudioType.CallAudioType() | ||
| 39 | + } | ||
| 40 | + | ||
| 29 | @Provides | 41 | @Provides |
| 42 | + fun audioOutputAttributes( | ||
| 43 | + audioType: AudioType | ||
| 44 | + ): AudioAttributes { | ||
| 45 | + return audioType.audioAttributes | ||
| 46 | + } | ||
| 47 | + | ||
| 48 | + @Provides | ||
| 49 | + @Singleton | ||
| 30 | fun audioHandler( | 50 | fun audioHandler( |
| 31 | audioSwitchHandler: Provider<AudioSwitchHandler>, | 51 | audioSwitchHandler: Provider<AudioSwitchHandler>, |
| 52 | + audioFocusHandler: Provider<AudioFocusHandler>, | ||
| 32 | @Named(InjectionNames.OVERRIDE_AUDIO_HANDLER) | 53 | @Named(InjectionNames.OVERRIDE_AUDIO_HANDLER) |
| 33 | - @Nullable | ||
| 34 | - audioHandlerOverride: AudioHandler? | 54 | + audioHandlerOverride: AudioHandler?, |
| 55 | + audioOutputType: AudioType, | ||
| 35 | ): AudioHandler { | 56 | ): AudioHandler { |
| 36 | - return audioHandlerOverride ?: audioSwitchHandler.get() | 57 | + return audioHandlerOverride ?: when (audioOutputType) { |
| 58 | + is AudioType.CallAudioType -> { | ||
| 59 | + audioSwitchHandler.get().apply { | ||
| 60 | + audioMode = audioOutputType.audioMode | ||
| 61 | + audioAttributeContentType = audioOutputType.audioAttributes.contentType | ||
| 62 | + audioAttributeUsageType = audioOutputType.audioAttributes.usage | ||
| 63 | + audioStreamType = audioOutputType.audioStreamType | ||
| 64 | + } | ||
| 65 | + } | ||
| 66 | + | ||
| 67 | + is AudioType.MediaAudioType, | ||
| 68 | + is AudioType.CustomAudioType -> { | ||
| 69 | + audioFocusHandler.get() | ||
| 70 | + } | ||
| 71 | + } | ||
| 37 | } | 72 | } |
| 38 | } | 73 | } |
| @@ -49,4 +49,5 @@ object InjectionNames { | @@ -49,4 +49,5 @@ object InjectionNames { | ||
| 49 | internal const val OVERRIDE_VIDEO_ENCODER_FACTORY = "override_video_encoder_factory" | 49 | internal const val OVERRIDE_VIDEO_ENCODER_FACTORY = "override_video_encoder_factory" |
| 50 | internal const val OVERRIDE_VIDEO_DECODER_FACTORY = "override_video_decoder_factory" | 50 | internal const val OVERRIDE_VIDEO_DECODER_FACTORY = "override_video_decoder_factory" |
| 51 | internal const val OVERRIDE_AUDIO_HANDLER = "override_audio_handler" | 51 | internal const val OVERRIDE_AUDIO_HANDLER = "override_audio_handler" |
| 52 | + internal const val OVERRIDE_AUDIO_OUTPUT_TYPE = "override_audio_output_type" | ||
| 52 | } | 53 | } |
| @@ -16,12 +16,14 @@ | @@ -16,12 +16,14 @@ | ||
| 16 | 16 | ||
| 17 | package io.livekit.android.dagger | 17 | package io.livekit.android.dagger |
| 18 | 18 | ||
| 19 | +import android.annotation.SuppressLint | ||
| 19 | import androidx.annotation.Nullable | 20 | import androidx.annotation.Nullable |
| 20 | import dagger.Module | 21 | import dagger.Module |
| 21 | import dagger.Provides | 22 | import dagger.Provides |
| 22 | import io.livekit.android.LiveKitOverrides | 23 | import io.livekit.android.LiveKitOverrides |
| 23 | import javax.inject.Named | 24 | import javax.inject.Named |
| 24 | 25 | ||
| 26 | +@SuppressLint("KotlinNullnessAnnotation") | ||
| 25 | @Module | 27 | @Module |
| 26 | class OverridesModule(private val overrides: LiveKitOverrides) { | 28 | class OverridesModule(private val overrides: LiveKitOverrides) { |
| 27 | 29 | ||
| @@ -33,12 +35,12 @@ class OverridesModule(private val overrides: LiveKitOverrides) { | @@ -33,12 +35,12 @@ class OverridesModule(private val overrides: LiveKitOverrides) { | ||
| 33 | @Provides | 35 | @Provides |
| 34 | @Named(InjectionNames.OVERRIDE_AUDIO_DEVICE_MODULE) | 36 | @Named(InjectionNames.OVERRIDE_AUDIO_DEVICE_MODULE) |
| 35 | @Nullable | 37 | @Nullable |
| 36 | - fun audioDeviceModule() = overrides.audioDeviceModule | 38 | + fun audioDeviceModule() = overrides.audioOptions?.audioDeviceModule |
| 37 | 39 | ||
| 38 | @Provides | 40 | @Provides |
| 39 | @Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER) | 41 | @Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER) |
| 40 | @Nullable | 42 | @Nullable |
| 41 | - fun javaAudioDeviceModuleCustomizer() = overrides.javaAudioDeviceModuleCustomizer | 43 | + fun javaAudioDeviceModuleCustomizer() = overrides.audioOptions?.javaAudioDeviceModuleCustomizer |
| 42 | 44 | ||
| 43 | @Provides | 45 | @Provides |
| 44 | @Named(InjectionNames.OVERRIDE_VIDEO_ENCODER_FACTORY) | 46 | @Named(InjectionNames.OVERRIDE_VIDEO_ENCODER_FACTORY) |
| @@ -53,6 +55,10 @@ class OverridesModule(private val overrides: LiveKitOverrides) { | @@ -53,6 +55,10 @@ class OverridesModule(private val overrides: LiveKitOverrides) { | ||
| 53 | @Provides | 55 | @Provides |
| 54 | @Named(InjectionNames.OVERRIDE_AUDIO_HANDLER) | 56 | @Named(InjectionNames.OVERRIDE_AUDIO_HANDLER) |
| 55 | @Nullable | 57 | @Nullable |
| 56 | - fun audioHandler() = overrides.audioHandler | 58 | + fun audioHandler() = overrides.audioOptions?.audioHandler |
| 59 | + | ||
| 60 | + @Provides | ||
| 61 | + @Named(InjectionNames.OVERRIDE_AUDIO_OUTPUT_TYPE) | ||
| 62 | + fun audioOutputType() = overrides.audioOptions?.audioOutputType | ||
| 57 | 63 | ||
| 58 | } | 64 | } |
| @@ -17,6 +17,7 @@ | @@ -17,6 +17,7 @@ | ||
| 17 | package io.livekit.android.dagger | 17 | package io.livekit.android.dagger |
| 18 | 18 | ||
| 19 | import android.content.Context | 19 | import android.content.Context |
| 20 | +import android.media.AudioAttributes | ||
| 20 | import android.media.MediaRecorder | 21 | import android.media.MediaRecorder |
| 21 | import android.os.Build | 22 | import android.os.Build |
| 22 | import androidx.annotation.Nullable | 23 | import androidx.annotation.Nullable |
| @@ -48,6 +49,7 @@ object RTCModule { | @@ -48,6 +49,7 @@ object RTCModule { | ||
| 48 | @Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER) | 49 | @Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER) |
| 49 | @Nullable | 50 | @Nullable |
| 50 | moduleCustomizer: ((builder: JavaAudioDeviceModule.Builder) -> Unit)?, | 51 | moduleCustomizer: ((builder: JavaAudioDeviceModule.Builder) -> Unit)?, |
| 52 | + audioOutputAttributes: AudioAttributes, | ||
| 51 | appContext: Context | 53 | appContext: Context |
| 52 | ): AudioDeviceModule { | 54 | ): AudioDeviceModule { |
| 53 | if (audioDeviceModuleOverride != null) { | 55 | if (audioDeviceModuleOverride != null) { |
| @@ -113,6 +115,7 @@ object RTCModule { | @@ -113,6 +115,7 @@ object RTCModule { | ||
| 113 | } | 115 | } |
| 114 | 116 | ||
| 115 | val useHardwareAudioProcessing = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q | 117 | val useHardwareAudioProcessing = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q |
| 118 | + | ||
| 116 | val builder = JavaAudioDeviceModule.builder(appContext) | 119 | val builder = JavaAudioDeviceModule.builder(appContext) |
| 117 | .setUseHardwareAcousticEchoCanceler(useHardwareAudioProcessing) | 120 | .setUseHardwareAcousticEchoCanceler(useHardwareAudioProcessing) |
| 118 | .setUseHardwareNoiseSuppressor(useHardwareAudioProcessing) | 121 | .setUseHardwareNoiseSuppressor(useHardwareAudioProcessing) |
| @@ -121,6 +124,7 @@ object RTCModule { | @@ -121,6 +124,7 @@ object RTCModule { | ||
| 121 | .setAudioRecordStateCallback(audioRecordStateCallback) | 124 | .setAudioRecordStateCallback(audioRecordStateCallback) |
| 122 | .setAudioTrackStateCallback(audioTrackStateCallback) | 125 | .setAudioTrackStateCallback(audioTrackStateCallback) |
| 123 | .setAudioSource(MediaRecorder.AudioSource.DEFAULT) | 126 | .setAudioSource(MediaRecorder.AudioSource.DEFAULT) |
| 127 | + .setAudioAttributes(audioOutputAttributes) | ||
| 124 | 128 | ||
| 125 | moduleCustomizer?.invoke(builder) | 129 | moduleCustomizer?.invoke(builder) |
| 126 | return builder.createAudioDeviceModule() | 130 | return builder.createAudioDeviceModule() |
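
Because `moduleCustomizer` is invoked after the library applies `audioOutputAttributes` to the builder, a customizer can still override the attributes on the `JavaAudioDeviceModule` if an app needs something the `AudioType` presets don't cover. A hedged sketch with illustrative attribute values:

```kt
import android.media.AudioAttributes
import io.livekit.android.AudioOptions
import io.livekit.android.LiveKitOverrides

val overrides = LiveKitOverrides(
    audioOptions = AudioOptions(
        javaAudioDeviceModuleCustomizer = { builder ->
            // Runs after setAudioAttributes(audioOutputAttributes), so this value wins.
            builder.setAudioAttributes(
                AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_MEDIA)
                    .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                    .build()
            )
        }
    )
)
```
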
| @@ -10,7 +10,6 @@ import androidx.lifecycle.MutableLiveData | @@ -10,7 +10,6 @@ import androidx.lifecycle.MutableLiveData | ||
| 10 | import androidx.lifecycle.viewModelScope | 10 | import androidx.lifecycle.viewModelScope |
| 11 | import com.github.ajalt.timberkt.Timber | 11 | import com.github.ajalt.timberkt.Timber |
| 12 | import io.livekit.android.LiveKit | 12 | import io.livekit.android.LiveKit |
| 13 | -import io.livekit.android.LiveKitOverrides | ||
| 14 | import io.livekit.android.RoomOptions | 13 | import io.livekit.android.RoomOptions |
| 15 | import io.livekit.android.audio.AudioSwitchHandler | 14 | import io.livekit.android.audio.AudioSwitchHandler |
| 16 | import io.livekit.android.e2ee.E2EEOptions | 15 | import io.livekit.android.e2ee.E2EEOptions |
| @@ -42,7 +41,6 @@ class CallViewModel( | @@ -42,7 +41,6 @@ class CallViewModel( | ||
| 42 | val e2ee: Boolean = false, | 41 | val e2ee: Boolean = false, |
| 43 | val e2eeKey: String? = "", | 42 | val e2eeKey: String? = "", |
| 44 | ) : AndroidViewModel(application) { | 43 | ) : AndroidViewModel(application) { |
| 45 | - val audioHandler = AudioSwitchHandler(application) | ||
| 46 | 44 | ||
| 47 | private fun getE2EEOptions(): E2EEOptions? { | 45 | private fun getE2EEOptions(): E2EEOptions? { |
| 48 | var e2eeOptions: E2EEOptions? = null | 46 | var e2eeOptions: E2EEOptions? = null |
| @@ -53,16 +51,13 @@ class CallViewModel( | @@ -53,16 +51,13 @@ class CallViewModel( | ||
| 53 | return e2eeOptions | 51 | return e2eeOptions |
| 54 | } | 52 | } |
| 55 | 53 | ||
| 56 | - | ||
| 57 | - | ||
| 58 | val room = LiveKit.create( | 54 | val room = LiveKit.create( |
| 59 | appContext = application, | 55 | appContext = application, |
| 60 | options = RoomOptions(adaptiveStream = true, dynacast = true), | 56 | options = RoomOptions(adaptiveStream = true, dynacast = true), |
| 61 | - overrides = LiveKitOverrides( | ||
| 62 | - audioHandler = audioHandler | ||
| 63 | - ) | ||
| 64 | ) | 57 | ) |
| 65 | 58 | ||
| 59 | + val audioHandler = room.audioHandler as AudioSwitchHandler | ||
| 60 | + | ||
| 66 | val participants = room::remoteParticipants.flow | 61 | val participants = room::remoteParticipants.flow |
| 67 | .map { remoteParticipants -> | 62 | .map { remoteParticipants -> |
| 68 | listOf<Participant>(room.localParticipant) + | 63 | listOf<Participant>(room.localParticipant) + |
| @@ -15,6 +15,7 @@ import androidx.compose.runtime.livedata.observeAsState | @@ -15,6 +15,7 @@ import androidx.compose.runtime.livedata.observeAsState | ||
| 15 | import androidx.compose.ui.Modifier | 15 | import androidx.compose.ui.Modifier |
| 16 | import androidx.lifecycle.MutableLiveData | 16 | import androidx.lifecycle.MutableLiveData |
| 17 | import androidx.lifecycle.lifecycleScope | 17 | import androidx.lifecycle.lifecycleScope |
| 18 | +import io.livekit.android.AudioOptions | ||
| 18 | import io.livekit.android.LiveKit | 19 | import io.livekit.android.LiveKit |
| 19 | import io.livekit.android.LiveKitOverrides | 20 | import io.livekit.android.LiveKitOverrides |
| 20 | import io.livekit.android.room.Room | 21 | import io.livekit.android.room.Room |
| @@ -26,7 +27,7 @@ import kotlinx.coroutines.launch | @@ -26,7 +27,7 @@ import kotlinx.coroutines.launch | ||
| 26 | import org.webrtc.EglBase | 27 | import org.webrtc.EglBase |
| 27 | import java.io.File | 28 | import java.io.File |
| 28 | import java.io.IOException | 29 | import java.io.IOException |
| 29 | -import java.util.* | 30 | +import java.util.Date |
| 30 | 31 | ||
| 31 | class MainActivity : ComponentActivity() { | 32 | class MainActivity : ComponentActivity() { |
| 32 | lateinit var room: Room | 33 | lateinit var room: Room |
| @@ -40,12 +41,14 @@ class MainActivity : ComponentActivity() { | @@ -40,12 +41,14 @@ class MainActivity : ComponentActivity() { | ||
| 40 | room = LiveKit.create( | 41 | room = LiveKit.create( |
| 41 | appContext = applicationContext, | 42 | appContext = applicationContext, |
| 42 | overrides = LiveKitOverrides( | 43 | overrides = LiveKitOverrides( |
| 43 | - javaAudioDeviceModuleCustomizer = { builder -> | ||
| 44 | - // Receive audio samples | ||
| 45 | - builder.setSamplesReadyCallback { samples -> | ||
| 46 | - videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples) | 44 | + audioOptions = AudioOptions( |
| 45 | + javaAudioDeviceModuleCustomizer = { builder -> | ||
| 46 | + // Receive audio samples | ||
| 47 | + builder.setSamplesReadyCallback { samples -> | ||
| 48 | + videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples) | ||
| 49 | + } | ||
| 47 | } | 50 | } |
| 48 | - } | 51 | + ), |
| 49 | ) | 52 | ) |
| 50 | ) | 53 | ) |
| 51 | 54 |