Committed by GitHub
feat: add external audio processing api. (#319)
* feat: add external audio processing api. * update. * fix spotlessKotlinCheck. * update. * Comments. * fix typo. * upgrade libwebrtc to 114.5735.08. * code format. * rename and add more methods. * update. * add switch dialog for audio processor. * format. * format. * add AudioProcessorOptions. * revert changes. * Code cleanup * spotless * fix test compile * spotless * Renaming of functions and removing url/token checking from isEnabled --------- Co-authored-by: davidliu <davidliu@deviange.net>
Showing 19 changed files with 538 additions and 8 deletions
| @@ -20,6 +20,7 @@ import android.media.AudioAttributes | @@ -20,6 +20,7 @@ import android.media.AudioAttributes | ||
| 20 | import android.media.AudioManager | 20 | import android.media.AudioManager |
| 21 | import io.livekit.android.audio.AudioFocusHandler | 21 | import io.livekit.android.audio.AudioFocusHandler |
| 22 | import io.livekit.android.audio.AudioHandler | 22 | import io.livekit.android.audio.AudioHandler |
| 23 | +import io.livekit.android.audio.AudioProcessorOptions | ||
| 23 | import io.livekit.android.audio.AudioSwitchHandler | 24 | import io.livekit.android.audio.AudioSwitchHandler |
| 24 | import io.livekit.android.audio.NoAudioHandler | 25 | import io.livekit.android.audio.NoAudioHandler |
| 25 | import io.livekit.android.room.Room | 26 | import io.livekit.android.room.Room |
| @@ -126,6 +127,11 @@ class AudioOptions( | @@ -126,6 +127,11 @@ class AudioOptions( | ||
| 126 | * [AudioManager.MODE_IN_COMMUNICATION]. | 127 | * [AudioManager.MODE_IN_COMMUNICATION]. |
| 127 | */ | 128 | */ |
| 128 | val disableCommunicationModeWorkaround: Boolean = false, | 129 | val disableCommunicationModeWorkaround: Boolean = false, |
| 130 | + | ||
| 131 | + /** | ||
| 132 | + * Options for processing the mic and incoming audio. | ||
| 133 | + */ | ||
| 134 | + val audioProcessorOptions: AudioProcessorOptions? = null, | ||
| 129 | ) | 135 | ) |
| 130 | 136 | ||
| 131 | /** | 137 | /** |
| 1 | /* | 1 | /* |
| 2 | - * Copyright 2023 LiveKit, Inc. | 2 | + * Copyright 2023-2024 LiveKit, Inc. |
| 3 | * | 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. | 5 | * you may not use this file except in compliance with the License. |
| 1 | +/* | ||
| 2 | + * Copyright 2023-2024 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.audio | ||
| 18 | + | ||
| 19 | +/** | ||
| 20 | + * Interface for controlling external audio processing. | ||
| 21 | + */ | ||
| 22 | +interface AudioProcessingController { | ||
| 23 | + /** | ||
| 24 | + * Set the audio processor used for post-capture processing (applied to captured mic audio). | ||
| 25 | + */ | ||
| 26 | + fun setCapturePostProcessing(processing: AudioProcessorInterface?) | ||
| 27 | + | ||
| 28 | + /** | ||
| 29 | + * Set whether to bypass the post-capture audio processing. | ||
| 30 | + */ | ||
| 31 | + fun setBypassForCapturePostProcessing(bypass: Boolean) | ||
| 32 | + | ||
| 33 | + /** | ||
| 34 | + * Set the audio processor used for pre-render processing (applied to incoming audio). | ||
| 35 | + */ | ||
| 36 | + fun setRenderPreProcessing(processing: AudioProcessorInterface?) | ||
| 37 | + | ||
| 38 | + /** | ||
| 39 | + * Set whether to bypass the pre-render audio processing. | ||
| 40 | + */ | ||
| 41 | + fun setBypassForRenderPreProcessing(bypass: Boolean) | ||
| 42 | +} |
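
The controller above is exposed on `Room` as `audioProcessingController` (see the `Room` constructor change further down), so processors can be swapped or bypassed while a session is running. A minimal sketch, assuming `myProcessor` is your own `AudioProcessorInterface` implementation:

```kotlin
import io.livekit.android.audio.AudioProcessorInterface
import io.livekit.android.room.Room

// Swap or bypass processors at runtime through the controller exposed on Room.
fun configureProcessing(room: Room, myProcessor: AudioProcessorInterface) {
    val controller = room.audioProcessingController

    // Apply the processor to captured mic audio.
    controller.setCapturePostProcessing(myProcessor)

    // Temporarily route mic audio around the processor without detaching it.
    controller.setBypassForCapturePostProcessing(true)

    // Leave incoming (rendered) audio untouched in this sketch.
    controller.setRenderPreProcessing(null)
}
```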
| 1 | +/* | ||
| 2 | + * Copyright 2023-2024 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.audio | ||
| 18 | + | ||
| 19 | +import java.nio.ByteBuffer | ||
| 20 | + | ||
| 21 | +/** | ||
| 22 | + * Interface for external audio processing. | ||
| 23 | + */ | ||
| 24 | +interface AudioProcessorInterface { | ||
| 25 | + /** | ||
| 26 | + * Check if the audio processing is enabled. | ||
| 27 | + */ | ||
| 28 | + fun isEnabled(): Boolean | ||
| 29 | + | ||
| 30 | + /** | ||
| 31 | + * Get the name of the audio processor. | ||
| 32 | + */ | ||
| 33 | + fun getName(): String | ||
| 34 | + | ||
| 35 | + /** | ||
| 36 | + * Initialize the audio processing. | ||
| 37 | + */ | ||
| 38 | + fun initializeAudioProcessing(sampleRateHz: Int, numChannels: Int) | ||
| 39 | + | ||
| 40 | + /** | ||
| 41 | + * Called when the sample rate has changed. | ||
| 42 | + */ | ||
| 43 | + fun resetAudioProcessing(newRate: Int) | ||
| 44 | + | ||
| 45 | + /** | ||
| 46 | + * Process the audio frame (10ms). | ||
| 47 | + */ | ||
| 48 | + fun processAudio(numBands: Int, numFrames: Int, buffer: ByteBuffer) | ||
| 49 | +} |
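
A minimal no-op implementation sketch. The diff does not document the layout of `buffer` (the bridge in `CustomAudioProcessingFactory` hands it straight through from libwebrtc), so this example only records the stream configuration and leaves the samples untouched; anything that mutates the buffer would need to confirm the sample format first.

```kotlin
import io.livekit.android.audio.AudioProcessorInterface
import java.nio.ByteBuffer

// Skeleton processor: satisfies the interface but performs no audio changes.
class LoggingAudioProcessor : AudioProcessorInterface {

    private var sampleRateHz = 0
    private var numChannels = 0

    override fun isEnabled(): Boolean = true

    // Hypothetical name; only used for display/logging.
    override fun getName(): String = "logging_processor"

    override fun initializeAudioProcessing(sampleRateHz: Int, numChannels: Int) {
        this.sampleRateHz = sampleRateHz
        this.numChannels = numChannels
    }

    override fun resetAudioProcessing(newRate: Int) {
        sampleRateHz = newRate
    }

    override fun processAudio(numBands: Int, numFrames: Int, buffer: ByteBuffer) {
        // Real processing would read/modify `buffer` in place here.
    }
}
```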
| 1 | +/* | ||
| 2 | + * Copyright 2024 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.audio | ||
| 18 | + | ||
| 19 | +data class AudioProcessorOptions( | ||
| 20 | + /** | ||
| 21 | + * Audio processor for captured audio. | ||
| 22 | + */ | ||
| 23 | + val capturePostProcessor: AudioProcessorInterface? = null, | ||
| 24 | + /** | ||
| 25 | + * When true, bypass the processing for captured audio. | ||
| 26 | + * | ||
| 27 | + * Noop if [capturePostProcessor] is null. | ||
| 28 | + */ | ||
| 29 | + val capturePostBypass: Boolean = false, | ||
| 30 | + /** | ||
| 31 | + * Audio processor for rendered audio. | ||
| 32 | + */ | ||
| 33 | + val renderPreProcessor: AudioProcessorInterface? = null, | ||
| 34 | + /** | ||
| 35 | + * When true, bypass the processing for rendered audio. | ||
| 36 | + * | ||
| 37 | + * Noop if [renderPreProcessor] is null. | ||
| 38 | + */ | ||
| 39 | + val renderPreBypass: Boolean = false, | ||
| 40 | +) |
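
These options are wired in at room creation through `LiveKitOverrides`, exactly as the sample app does later in this diff. A sketch; `context` and `processor` are placeholders supplied by the caller:

```kotlin
import android.content.Context
import io.livekit.android.AudioOptions
import io.livekit.android.LiveKit
import io.livekit.android.LiveKitOverrides
import io.livekit.android.RoomOptions
import io.livekit.android.audio.AudioProcessorInterface
import io.livekit.android.audio.AudioProcessorOptions

fun createRoomWithProcessor(context: Context, processor: AudioProcessorInterface) =
    LiveKit.create(
        appContext = context,
        options = RoomOptions(),
        overrides = LiveKitOverrides(
            audioOptions = AudioOptions(
                audioProcessorOptions = AudioProcessorOptions(
                    capturePostProcessor = processor, // process captured mic audio
                    capturePostBypass = false,        // start with processing active
                ),
            ),
        ),
    )
```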
| @@ -47,6 +47,7 @@ internal object InjectionNames { | @@ -47,6 +47,7 @@ internal object InjectionNames { | ||
| 47 | // Overrides | 47 | // Overrides |
| 48 | internal const val OVERRIDE_OKHTTP = "override_okhttp" | 48 | internal const val OVERRIDE_OKHTTP = "override_okhttp" |
| 49 | internal const val OVERRIDE_AUDIO_DEVICE_MODULE = "override_audio_device_module" | 49 | internal const val OVERRIDE_AUDIO_DEVICE_MODULE = "override_audio_device_module" |
| 50 | + internal const val OVERRIDE_AUDIO_PROCESSOR_OPTIONS = "override_audio_processor_options" | ||
| 50 | internal const val OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER = "override_java_audio_device_module_customizer" | 51 | internal const val OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER = "override_java_audio_device_module_customizer" |
| 51 | internal const val OVERRIDE_VIDEO_ENCODER_FACTORY = "override_video_encoder_factory" | 52 | internal const val OVERRIDE_VIDEO_ENCODER_FACTORY = "override_video_encoder_factory" |
| 52 | internal const val OVERRIDE_VIDEO_DECODER_FACTORY = "override_video_decoder_factory" | 53 | internal const val OVERRIDE_VIDEO_DECODER_FACTORY = "override_video_decoder_factory" |
| @@ -38,6 +38,11 @@ internal class OverridesModule(private val overrides: LiveKitOverrides) { | @@ -38,6 +38,11 @@ internal class OverridesModule(private val overrides: LiveKitOverrides) { | ||
| 38 | fun audioDeviceModule() = overrides.audioOptions?.audioDeviceModule | 38 | fun audioDeviceModule() = overrides.audioOptions?.audioDeviceModule |
| 39 | 39 | ||
| 40 | @Provides | 40 | @Provides |
| 41 | + @Named(InjectionNames.OVERRIDE_AUDIO_PROCESSOR_OPTIONS) | ||
| 42 | + @Nullable | ||
| 43 | + fun audioProcessorOptions() = overrides.audioOptions?.audioProcessorOptions | ||
| 44 | + | ||
| 45 | + @Provides | ||
| 41 | @Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER) | 46 | @Named(InjectionNames.OVERRIDE_JAVA_AUDIO_DEVICE_MODULE_CUSTOMIZER) |
| 42 | @Nullable | 47 | @Nullable |
| 43 | fun javaAudioDeviceModuleCustomizer() = overrides.audioOptions?.javaAudioDeviceModuleCustomizer | 48 | fun javaAudioDeviceModuleCustomizer() = overrides.audioOptions?.javaAudioDeviceModuleCustomizer |
| @@ -25,10 +25,13 @@ import androidx.annotation.Nullable | @@ -25,10 +25,13 @@ import androidx.annotation.Nullable | ||
| 25 | import dagger.Module | 25 | import dagger.Module |
| 26 | import dagger.Provides | 26 | import dagger.Provides |
| 27 | import io.livekit.android.LiveKit | 27 | import io.livekit.android.LiveKit |
| 28 | +import io.livekit.android.audio.AudioProcessingController | ||
| 29 | +import io.livekit.android.audio.AudioProcessorOptions | ||
| 28 | import io.livekit.android.audio.CommunicationWorkaround | 30 | import io.livekit.android.audio.CommunicationWorkaround |
| 29 | import io.livekit.android.memory.CloseableManager | 31 | import io.livekit.android.memory.CloseableManager |
| 30 | import io.livekit.android.util.LKLog | 32 | import io.livekit.android.util.LKLog |
| 31 | import io.livekit.android.util.LoggingLevel | 33 | import io.livekit.android.util.LoggingLevel |
| 34 | +import io.livekit.android.webrtc.CustomAudioProcessingFactory | ||
| 32 | import io.livekit.android.webrtc.CustomVideoDecoderFactory | 35 | import io.livekit.android.webrtc.CustomVideoDecoderFactory |
| 33 | import io.livekit.android.webrtc.CustomVideoEncoderFactory | 36 | import io.livekit.android.webrtc.CustomVideoEncoderFactory |
| 34 | import livekit.org.webrtc.* | 37 | import livekit.org.webrtc.* |
| @@ -45,6 +48,22 @@ internal object RTCModule { | @@ -45,6 +48,22 @@ internal object RTCModule { | ||
| 45 | 48 | ||
| 46 | /** | 49 | /** |
| 47 | * Certain classes require libwebrtc to be initialized prior to use. | 50 | * Certain classes require libwebrtc to be initialized prior to use. |
| 51 | + * | ||
| 52 | + * If your provider depends on libwebrtc initialization, just add it | ||
| 53 | + * as a dependency in your method signature. | ||
| 54 | + * | ||
| 55 | + * Example: | ||
| 56 | + * | ||
| 57 | + * ``` | ||
| 58 | + * @Provides | ||
| 59 | + * fun someFactory( | ||
| 60 | + * @Suppress("UNUSED_PARAMETER") | ||
| 61 | + * @Named(InjectionNames.LIB_WEBRTC_INITIALIZATION) | ||
| 62 | + * webrtcInitialization: LibWebrtcInitialization | ||
| 63 | + * ): SomeFactory { | ||
| 64 | + * ... | ||
| 65 | + * } | ||
| 66 | + * ``` | ||
| 48 | */ | 67 | */ |
| 49 | @Provides | 68 | @Provides |
| 50 | @Singleton | 69 | @Singleton |
| @@ -216,6 +235,28 @@ internal object RTCModule { | @@ -216,6 +235,28 @@ internal object RTCModule { | ||
| 216 | } | 235 | } |
| 217 | 236 | ||
| 218 | @Provides | 237 | @Provides |
| 238 | + @Singleton | ||
| 239 | + fun customAudioProcessingFactory( | ||
| 240 | + @Suppress("UNUSED_PARAMETER") | ||
| 241 | + @Named(InjectionNames.LIB_WEBRTC_INITIALIZATION) | ||
| 242 | + webrtcInitialization: LibWebrtcInitialization, | ||
| 243 | + @Named(InjectionNames.OVERRIDE_AUDIO_PROCESSOR_OPTIONS) | ||
| 244 | + audioProcessorOptions: AudioProcessorOptions?, | ||
| 245 | + ): CustomAudioProcessingFactory { | ||
| 246 | + return CustomAudioProcessingFactory(audioProcessorOptions ?: AudioProcessorOptions()) | ||
| 247 | + } | ||
| 248 | + | ||
| 249 | + @Provides | ||
| 250 | + fun audioProcessingController(customAudioProcessingFactory: CustomAudioProcessingFactory): AudioProcessingController { | ||
| 251 | + return customAudioProcessingFactory | ||
| 252 | + } | ||
| 253 | + | ||
| 254 | + @Provides | ||
| 255 | + fun audioProcessingFactory(customAudioProcessingFactory: CustomAudioProcessingFactory): AudioProcessingFactory { | ||
| 256 | + return customAudioProcessingFactory.getAudioProcessingFactory() | ||
| 257 | + } | ||
| 258 | + | ||
| 259 | + @Provides | ||
| 219 | fun videoDecoderFactory( | 260 | fun videoDecoderFactory( |
| 220 | @Suppress("UNUSED_PARAMETER") | 261 | @Suppress("UNUSED_PARAMETER") |
| 221 | @Named(InjectionNames.LIB_WEBRTC_INITIALIZATION) | 262 | @Named(InjectionNames.LIB_WEBRTC_INITIALIZATION) |
| @@ -246,9 +287,11 @@ internal object RTCModule { | @@ -246,9 +287,11 @@ internal object RTCModule { | ||
| 246 | @Named(InjectionNames.OVERRIDE_PEER_CONNECTION_FACTORY_OPTIONS) | 287 | @Named(InjectionNames.OVERRIDE_PEER_CONNECTION_FACTORY_OPTIONS) |
| 247 | peerConnectionFactoryOptions: PeerConnectionFactory.Options?, | 288 | peerConnectionFactoryOptions: PeerConnectionFactory.Options?, |
| 248 | memoryManager: CloseableManager, | 289 | memoryManager: CloseableManager, |
| 290 | + audioProcessingFactory: AudioProcessingFactory?, | ||
| 249 | ): PeerConnectionFactory { | 291 | ): PeerConnectionFactory { |
| 250 | return PeerConnectionFactory.builder() | 292 | return PeerConnectionFactory.builder() |
| 251 | .setAudioDeviceModule(audioDeviceModule) | 293 | .setAudioDeviceModule(audioDeviceModule) |
| 294 | + .setAudioProcessingFactory(audioProcessingFactory) | ||
| 252 | .setVideoEncoderFactory(videoEncoderFactory) | 295 | .setVideoEncoderFactory(videoEncoderFactory) |
| 253 | .setVideoDecoderFactory(videoDecoderFactory) | 296 | .setVideoDecoderFactory(videoDecoderFactory) |
| 254 | .apply { | 297 | .apply { |
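
Outside of Dagger, the providers above boil down to handing the factory produced by `CustomAudioProcessingFactory` to the `PeerConnectionFactory` builder. A standalone sketch of that wiring, assuming libwebrtc has already been initialized:

```kotlin
import io.livekit.android.audio.AudioProcessorOptions
import io.livekit.android.webrtc.CustomAudioProcessingFactory
import livekit.org.webrtc.PeerConnectionFactory

fun buildPeerConnectionFactory(options: AudioProcessorOptions?): PeerConnectionFactory {
    // CustomAudioProcessingFactory is both the AudioProcessingController and
    // the supplier of the webrtc AudioProcessingFactory.
    val custom = CustomAudioProcessingFactory(options ?: AudioProcessorOptions())

    return PeerConnectionFactory.builder()
        .setAudioProcessingFactory(custom.getAudioProcessingFactory())
        .createPeerConnectionFactory()
}
```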
| @@ -31,6 +31,8 @@ import io.livekit.android.ConnectOptions | @@ -31,6 +31,8 @@ import io.livekit.android.ConnectOptions | ||
| 31 | import io.livekit.android.RoomOptions | 31 | import io.livekit.android.RoomOptions |
| 32 | import io.livekit.android.Version | 32 | import io.livekit.android.Version |
| 33 | import io.livekit.android.audio.AudioHandler | 33 | import io.livekit.android.audio.AudioHandler |
| 34 | +import io.livekit.android.audio.AudioProcessingController | ||
| 35 | +import io.livekit.android.audio.AudioProcessorOptions | ||
| 34 | import io.livekit.android.audio.CommunicationWorkaround | 36 | import io.livekit.android.audio.CommunicationWorkaround |
| 35 | import io.livekit.android.dagger.InjectionNames | 37 | import io.livekit.android.dagger.InjectionNames |
| 36 | import io.livekit.android.e2ee.E2EEManager | 38 | import io.livekit.android.e2ee.E2EEManager |
| @@ -63,7 +65,7 @@ constructor( | @@ -63,7 +65,7 @@ constructor( | ||
| 63 | @Assisted private val context: Context, | 65 | @Assisted private val context: Context, |
| 64 | private val engine: RTCEngine, | 66 | private val engine: RTCEngine, |
| 65 | private val eglBase: EglBase, | 67 | private val eglBase: EglBase, |
| 66 | - private val localParticipantFactory: LocalParticipant.Factory, | 68 | + localParticipantFactory: LocalParticipant.Factory, |
| 67 | private val defaultsManager: DefaultsManager, | 69 | private val defaultsManager: DefaultsManager, |
| 68 | @Named(InjectionNames.DISPATCHER_DEFAULT) | 70 | @Named(InjectionNames.DISPATCHER_DEFAULT) |
| 69 | private val defaultDispatcher: CoroutineDispatcher, | 71 | private val defaultDispatcher: CoroutineDispatcher, |
| @@ -73,6 +75,7 @@ constructor( | @@ -73,6 +75,7 @@ constructor( | ||
| 73 | private val closeableManager: CloseableManager, | 75 | private val closeableManager: CloseableManager, |
| 74 | private val e2EEManagerFactory: E2EEManager.Factory, | 76 | private val e2EEManagerFactory: E2EEManager.Factory, |
| 75 | private val communicationWorkaround: CommunicationWorkaround, | 77 | private val communicationWorkaround: CommunicationWorkaround, |
| 78 | + val audioProcessingController: AudioProcessingController, | ||
| 76 | ) : RTCEngine.Listener, ParticipantListener { | 79 | ) : RTCEngine.Listener, ParticipantListener { |
| 77 | 80 | ||
| 78 | private lateinit var coroutineScope: CoroutineScope | 81 | private lateinit var coroutineScope: CoroutineScope |
| @@ -181,6 +184,11 @@ constructor( | @@ -181,6 +184,11 @@ constructor( | ||
| 181 | var adaptiveStream: Boolean = false | 184 | var adaptiveStream: Boolean = false |
| 182 | 185 | ||
| 183 | /** | 186 | /** |
| 187 | + * Whether the audio processor is enabled. | ||
| 188 | + */ | ||
| 189 | + var audioProcessorIsEnabled: Boolean = false | ||
| 190 | + | ||
| 191 | + /** | ||
| 184 | * Dynamically pauses video layers that are not being consumed by any subscribers, | 192 | * Dynamically pauses video layers that are not being consumed by any subscribers, |
| 185 | * significantly reducing publishing CPU and bandwidth usage. | 193 | * significantly reducing publishing CPU and bandwidth usage. |
| 186 | * | 194 | * |
| @@ -203,6 +211,11 @@ constructor( | @@ -203,6 +211,11 @@ constructor( | ||
| 203 | var e2eeOptions: E2EEOptions? = null | 211 | var e2eeOptions: E2EEOptions? = null |
| 204 | 212 | ||
| 205 | /** | 213 | /** |
| 214 | + * Options for external audio processing. | ||
| 215 | + */ | ||
| 216 | + var audioProcessorOptions: AudioProcessorOptions? = null | ||
| 217 | + | ||
| 218 | + /** | ||
| 206 | * Default options to use when creating an audio track. | 219 | * Default options to use when creating an audio track. |
| 207 | */ | 220 | */ |
| 208 | var audioTrackCaptureDefaults: LocalAudioTrackOptions by defaultsManager::audioTrackCaptureDefaults | 221 | var audioTrackCaptureDefaults: LocalAudioTrackOptions by defaultsManager::audioTrackCaptureDefaults |
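
The two new `Room` members pair with the controller: the sample app later in this diff assigns `audioProcessorOptions` before connecting and reads `audioProcessorIsEnabled` afterwards. A condensed sketch with placeholder url/token:

```kotlin
import io.livekit.android.audio.AudioProcessorOptions
import io.livekit.android.room.Room

suspend fun connectWithProcessing(room: Room, options: AudioProcessorOptions) {
    room.audioProcessorOptions = options

    // Placeholder credentials; substitute your own server url and token.
    room.connect(url = "wss://example.livekit.cloud", token = "<token>")

    if (room.audioProcessorIsEnabled) {
        // Processing can be bypassed or re-enabled at any time mid-call.
        room.audioProcessingController.setBypassForCapturePostProcessing(false)
    }
}
```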
livekit-android-sdk/src/main/java/io/livekit/android/webrtc/CustomAudioProcessingFactory.kt
0 → 100644
| 1 | +/* | ||
| 2 | + * Copyright 2023-2024 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.webrtc | ||
| 18 | + | ||
| 19 | +import io.livekit.android.audio.AudioProcessingController | ||
| 20 | +import io.livekit.android.audio.AudioProcessorInterface | ||
| 21 | +import io.livekit.android.audio.AudioProcessorOptions | ||
| 22 | +import livekit.org.webrtc.AudioProcessingFactory | ||
| 23 | +import livekit.org.webrtc.ExternalAudioProcessingFactory | ||
| 24 | +import java.nio.ByteBuffer | ||
| 25 | + | ||
| 26 | +class CustomAudioProcessingFactory(private val audioProcessorOptions: AudioProcessorOptions) : AudioProcessingController { | ||
| 27 | + | ||
| 28 | + private val externalAudioProcessor = ExternalAudioProcessingFactory() | ||
| 29 | + | ||
| 30 | + init { | ||
| 31 | + if (audioProcessorOptions.capturePostProcessor != null) { | ||
| 32 | + setCapturePostProcessing(audioProcessorOptions.capturePostProcessor) | ||
| 33 | + setBypassForCapturePostProcessing(audioProcessorOptions.capturePostBypass) | ||
| 34 | + } else { | ||
| 35 | + setCapturePostProcessing(null) | ||
| 36 | + setBypassForCapturePostProcessing(false) | ||
| 37 | + } | ||
| 38 | + if (audioProcessorOptions.renderPreProcessor != null) { | ||
| 39 | + setRenderPreProcessing(audioProcessorOptions.renderPreProcessor) | ||
| 40 | + setBypassForRenderPreProcessing(audioProcessorOptions.renderPreBypass) | ||
| 41 | + } else { | ||
| 42 | + setRenderPreProcessing(null) | ||
| 43 | + setBypassForRenderPreProcessing(false) | ||
| 44 | + } | ||
| 45 | + } | ||
| 46 | + | ||
| 47 | + fun getAudioProcessingFactory(): AudioProcessingFactory { | ||
| 48 | + return externalAudioProcessor | ||
| 49 | + } | ||
| 50 | + | ||
| 51 | + override fun setCapturePostProcessing(processing: AudioProcessorInterface?) { | ||
| 52 | + externalAudioProcessor.setCapturePostProcessing( | ||
| 53 | + processing.toAudioProcessing(), | ||
| 54 | + ) | ||
| 55 | + } | ||
| 56 | + | ||
| 57 | + override fun setBypassForCapturePostProcessing(bypass: Boolean) { | ||
| 58 | + externalAudioProcessor.setBypassFlagForCapturePost(bypass) | ||
| 59 | + } | ||
| 60 | + | ||
| 61 | + override fun setRenderPreProcessing(processing: AudioProcessorInterface?) { | ||
| 62 | + externalAudioProcessor.setRenderPreProcessing( | ||
| 63 | + processing.toAudioProcessing(), | ||
| 64 | + ) | ||
| 65 | + } | ||
| 66 | + | ||
| 67 | + override fun setBypassForRenderPreProcessing(bypass: Boolean) { | ||
| 68 | + externalAudioProcessor.setBypassFlagForRenderPre(bypass) | ||
| 69 | + } | ||
| 70 | + | ||
| 71 | + private class AudioProcessingBridge( | ||
| 72 | + var audioProcessing: AudioProcessorInterface? = null, | ||
| 73 | + ) : ExternalAudioProcessingFactory.AudioProcessing { | ||
| 74 | + override fun initialize(sampleRateHz: Int, numChannels: Int) { | ||
| 75 | + audioProcessing?.initializeAudioProcessing(sampleRateHz, numChannels) | ||
| 76 | + } | ||
| 77 | + | ||
| 78 | + override fun reset(newRate: Int) { | ||
| 79 | + audioProcessing?.resetAudioProcessing(newRate) | ||
| 80 | + } | ||
| 81 | + | ||
| 82 | + override fun process(numBands: Int, numFrames: Int, buffer: ByteBuffer?) { | ||
| 83 | + audioProcessing?.processAudio(numBands, numFrames, buffer!!) | ||
| 84 | + } | ||
| 85 | + } | ||
| 86 | + | ||
| 87 | + private fun AudioProcessorInterface?.toAudioProcessing(): ExternalAudioProcessingFactory.AudioProcessing { | ||
| 88 | + return AudioProcessingBridge(this) | ||
| 89 | + } | ||
| 90 | +} |
livekit-android-sdk/src/test/java/io/livekit/android/mock/MockAudioProcessingController.kt
0 → 100644
| 1 | +/* | ||
| 2 | + * Copyright 2024 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.mock | ||
| 18 | + | ||
| 19 | +import io.livekit.android.audio.AudioProcessingController | ||
| 20 | +import io.livekit.android.audio.AudioProcessorInterface | ||
| 21 | + | ||
| 22 | +class MockAudioProcessingController : AudioProcessingController { | ||
| 23 | + override fun setCapturePostProcessing(processing: AudioProcessorInterface?) { | ||
| 24 | + } | ||
| 25 | + | ||
| 26 | + override fun setBypassForCapturePostProcessing(bypass: Boolean) { | ||
| 27 | + } | ||
| 28 | + | ||
| 29 | + override fun setRenderPreProcessing(processing: AudioProcessorInterface?) { | ||
| 30 | + } | ||
| 31 | + | ||
| 32 | + override fun setBypassForRenderPreProcessing(bypass: Boolean) { | ||
| 33 | + } | ||
| 34 | +} |
| @@ -20,8 +20,10 @@ import android.content.Context | @@ -20,8 +20,10 @@ import android.content.Context | ||
| 20 | import android.javax.sdp.SdpFactory | 20 | import android.javax.sdp.SdpFactory |
| 21 | import dagger.Module | 21 | import dagger.Module |
| 22 | import dagger.Provides | 22 | import dagger.Provides |
| 23 | +import io.livekit.android.audio.AudioProcessingController | ||
| 23 | import io.livekit.android.dagger.CapabilitiesGetter | 24 | import io.livekit.android.dagger.CapabilitiesGetter |
| 24 | import io.livekit.android.dagger.InjectionNames | 25 | import io.livekit.android.dagger.InjectionNames |
| 26 | +import io.livekit.android.mock.MockAudioProcessingController | ||
| 25 | import io.livekit.android.mock.MockEglBase | 27 | import io.livekit.android.mock.MockEglBase |
| 26 | import livekit.org.webrtc.* | 28 | import livekit.org.webrtc.* |
| 27 | import javax.inject.Named | 29 | import javax.inject.Named |
| @@ -40,6 +42,11 @@ object TestRTCModule { | @@ -40,6 +42,11 @@ object TestRTCModule { | ||
| 40 | fun eglContext(eglBase: EglBase): EglBase.Context = eglBase.eglBaseContext | 42 | fun eglContext(eglBase: EglBase): EglBase.Context = eglBase.eglBaseContext |
| 41 | 43 | ||
| 42 | @Provides | 44 | @Provides |
| 45 | + fun audioProcessingController(): AudioProcessingController { | ||
| 46 | + return MockAudioProcessingController() | ||
| 47 | + } | ||
| 48 | + | ||
| 49 | + @Provides | ||
| 43 | @Singleton | 50 | @Singleton |
| 44 | fun peerConnectionFactory( | 51 | fun peerConnectionFactory( |
| 45 | appContext: Context, | 52 | appContext: Context, |
| @@ -101,6 +101,7 @@ class RoomTest { | @@ -101,6 +101,7 @@ class RoomTest { | ||
| 101 | closeableManager = CloseableManager(), | 101 | closeableManager = CloseableManager(), |
| 102 | e2EEManagerFactory = e2EEManagerFactory, | 102 | e2EEManagerFactory = e2EEManagerFactory, |
| 103 | communicationWorkaround = NoopCommunicationWorkaround(), | 103 | communicationWorkaround = NoopCommunicationWorkaround(), |
| 104 | + audioProcessingController = MockAudioProcessingController(), | ||
| 104 | ) | 105 | ) |
| 105 | } | 106 | } |
| 106 | 107 |
| @@ -25,8 +25,11 @@ import androidx.lifecycle.LiveData | @@ -25,8 +25,11 @@ import androidx.lifecycle.LiveData | ||
| 25 | import androidx.lifecycle.MutableLiveData | 25 | import androidx.lifecycle.MutableLiveData |
| 26 | import androidx.lifecycle.viewModelScope | 26 | import androidx.lifecycle.viewModelScope |
| 27 | import com.github.ajalt.timberkt.Timber | 27 | import com.github.ajalt.timberkt.Timber |
| 28 | +import io.livekit.android.AudioOptions | ||
| 28 | import io.livekit.android.LiveKit | 29 | import io.livekit.android.LiveKit |
| 30 | +import io.livekit.android.LiveKitOverrides | ||
| 29 | import io.livekit.android.RoomOptions | 31 | import io.livekit.android.RoomOptions |
| 32 | +import io.livekit.android.audio.AudioProcessorOptions | ||
| 30 | import io.livekit.android.audio.AudioSwitchHandler | 33 | import io.livekit.android.audio.AudioSwitchHandler |
| 31 | import io.livekit.android.e2ee.E2EEOptions | 34 | import io.livekit.android.e2ee.E2EEOptions |
| 32 | import io.livekit.android.events.RoomEvent | 35 | import io.livekit.android.events.RoomEvent |
| @@ -56,6 +59,7 @@ class CallViewModel( | @@ -56,6 +59,7 @@ class CallViewModel( | ||
| 56 | application: Application, | 59 | application: Application, |
| 57 | val e2ee: Boolean = false, | 60 | val e2ee: Boolean = false, |
| 58 | val e2eeKey: String? = "", | 61 | val e2eeKey: String? = "", |
| 62 | + val audioProcessorOptions: AudioProcessorOptions? = null, | ||
| 59 | ) : AndroidViewModel(application) { | 63 | ) : AndroidViewModel(application) { |
| 60 | 64 | ||
| 61 | private fun getE2EEOptions(): E2EEOptions? { | 65 | private fun getE2EEOptions(): E2EEOptions? { |
| @@ -67,9 +71,20 @@ class CallViewModel( | @@ -67,9 +71,20 @@ class CallViewModel( | ||
| 67 | return e2eeOptions | 71 | return e2eeOptions |
| 68 | } | 72 | } |
| 69 | 73 | ||
| 74 | + private fun getRoomOptions(): RoomOptions { | ||
| 75 | + return RoomOptions( | ||
| 76 | + adaptiveStream = true, | ||
| 77 | + dynacast = true, | ||
| 78 | + e2eeOptions = getE2EEOptions(), | ||
| 79 | + ) | ||
| 80 | + } | ||
| 81 | + | ||
| 70 | val room = LiveKit.create( | 82 | val room = LiveKit.create( |
| 71 | appContext = application, | 83 | appContext = application, |
| 72 | - options = RoomOptions(adaptiveStream = true, dynacast = true), | 84 | + options = getRoomOptions(), |
| 85 | + overrides = LiveKitOverrides( | ||
| 86 | + audioOptions = AudioOptions(audioProcessorOptions = audioProcessorOptions), | ||
| 87 | + ), | ||
| 73 | ) | 88 | ) |
| 74 | 89 | ||
| 75 | val audioHandler = room.audioHandler as AudioSwitchHandler | 90 | val audioHandler = room.audioHandler as AudioSwitchHandler |
| @@ -103,6 +118,12 @@ class CallViewModel( | @@ -103,6 +118,12 @@ class CallViewModel( | ||
| 103 | private val mutableScreencastEnabled = MutableLiveData(false) | 118 | private val mutableScreencastEnabled = MutableLiveData(false) |
| 104 | val screenshareEnabled = mutableScreencastEnabled.hide() | 119 | val screenshareEnabled = mutableScreencastEnabled.hide() |
| 105 | 120 | ||
| 121 | + private val mutableEnhancedNsEnabled = MutableLiveData(false) | ||
| 122 | + val enhancedNsEnabled = mutableEnhancedNsEnabled.hide() | ||
| 123 | + | ||
| 124 | + private val mutableEnableAudioProcessor = MutableLiveData(true) | ||
| 125 | + val enableAudioProcessor = mutableEnableAudioProcessor.hide() | ||
| 126 | + | ||
| 106 | // Emits a string whenever a data message is received. | 127 | // Emits a string whenever a data message is received. |
| 107 | private val mutableDataReceived = MutableSharedFlow<String>() | 128 | private val mutableDataReceived = MutableSharedFlow<String>() |
| 108 | val dataReceived = mutableDataReceived | 129 | val dataReceived = mutableDataReceived |
| @@ -176,14 +197,36 @@ class CallViewModel( | @@ -176,14 +197,36 @@ class CallViewModel( | ||
| 176 | } | 197 | } |
| 177 | } | 198 | } |
| 178 | 199 | ||
| 200 | + fun toggleEnhancedNs(enabled: Boolean? = null) { | ||
| 201 | + if (enabled != null) { | ||
| 202 | + mutableEnableAudioProcessor.postValue(enabled) | ||
| 203 | + room.audioProcessingController.setBypassForCapturePostProcessing(!enabled) | ||
| 204 | + return | ||
| 205 | + } | ||
| 206 | + | ||
| 207 | + if (room.audioProcessorIsEnabled) { | ||
| 208 | + if (enableAudioProcessor.value == true) { | ||
| 209 | + room.audioProcessingController.setBypassForCapturePostProcessing(true) | ||
| 210 | + mutableEnableAudioProcessor.postValue(false) | ||
| 211 | + } else { | ||
| 212 | + room.audioProcessingController.setBypassForCapturePostProcessing(false) | ||
| 213 | + mutableEnableAudioProcessor.postValue(true) | ||
| 214 | + } | ||
| 215 | + } | ||
| 216 | + } | ||
| 217 | + | ||
| 179 | private suspend fun connectToRoom() { | 218 | private suspend fun connectToRoom() { |
| 180 | try { | 219 | try { |
| 181 | room.e2eeOptions = getE2EEOptions() | 220 | room.e2eeOptions = getE2EEOptions() |
| 221 | + room.audioProcessorOptions = audioProcessorOptions | ||
| 182 | room.connect( | 222 | room.connect( |
| 183 | url = url, | 223 | url = url, |
| 184 | token = token, | 224 | token = token, |
| 185 | ) | 225 | ) |
| 186 | 226 | ||
| 227 | + mutableEnhancedNsEnabled.postValue(room.audioProcessorIsEnabled) | ||
| 228 | + mutableEnableAudioProcessor.postValue(true) | ||
| 229 | + | ||
| 187 | // Create and publish audio/video tracks | 230 | // Create and publish audio/video tracks |
| 188 | val localParticipant = room.localParticipant | 231 | val localParticipant = room.localParticipant |
| 189 | localParticipant.setMicrophoneEnabled(true) | 232 | localParticipant.setMicrophoneEnabled(true) |
| 1 | +<?xml version="1.0" encoding="utf-8"?> | ||
| 2 | +<vector xmlns:android="http://schemas.android.com/apk/res/android" | ||
| 3 | + android:width="32dp" | ||
| 4 | + android:height="32dp" | ||
| 5 | + android:viewportWidth="32" | ||
| 6 | + android:viewportHeight="32"> | ||
| 7 | + | ||
| 8 | + <path | ||
| 9 | + android:strokeColor="#000000" | ||
| 10 | + android:strokeWidth="2" | ||
| 11 | + android:strokeLineJoin="round" | ||
| 12 | + android:strokeMiterLimit="10" | ||
| 13 | + android:strokeLineCap="round" | ||
| 14 | + android:pathData="M 4 14 L 4 18" /> | ||
| 15 | + <path | ||
| 16 | + android:strokeColor="#000000" | ||
| 17 | + android:strokeWidth="2" | ||
| 18 | + android:strokeLineJoin="round" | ||
| 19 | + android:strokeMiterLimit="10" | ||
| 20 | + android:strokeLineCap="round" | ||
| 21 | + android:pathData="M 8 12 L 8 20" /> | ||
| 22 | + <path | ||
| 23 | + android:strokeColor="#000000" | ||
| 24 | + android:strokeWidth="2" | ||
| 25 | + android:strokeLineJoin="round" | ||
| 26 | + android:strokeMiterLimit="10" | ||
| 27 | + android:strokeLineCap="round" | ||
| 28 | + android:pathData="M 12 7 L 12 25" /> | ||
| 29 | + <path | ||
| 30 | + android:strokeColor="#000000" | ||
| 31 | + android:strokeWidth="2" | ||
| 32 | + android:strokeLineJoin="round" | ||
| 33 | + android:strokeMiterLimit="10" | ||
| 34 | + android:strokeLineCap="round" | ||
| 35 | + android:pathData="M 16 14 L 16 18" /> | ||
| 36 | + <path | ||
| 37 | + android:strokeColor="#000000" | ||
| 38 | + android:strokeWidth="2" | ||
| 39 | + android:strokeLineJoin="round" | ||
| 40 | + android:strokeMiterLimit="10" | ||
| 41 | + android:strokeLineCap="round" | ||
| 42 | + android:pathData="M 20 10 L 20 22" /> | ||
| 43 | + <path | ||
| 44 | + android:strokeColor="#000000" | ||
| 45 | + android:strokeWidth="2" | ||
| 46 | + android:strokeLineJoin="round" | ||
| 47 | + android:strokeMiterLimit="10" | ||
| 48 | + android:strokeLineCap="round" | ||
| 49 | + android:pathData="M 24 7 L 24 25" /> | ||
| 50 | + <path | ||
| 51 | + android:strokeColor="#000000" | ||
| 52 | + android:strokeWidth="2" | ||
| 53 | + android:strokeLineJoin="round" | ||
| 54 | + android:strokeMiterLimit="10" | ||
| 55 | + android:strokeLineCap="round" | ||
| 56 | + android:pathData="M 28 14 L 28 18" /> | ||
| 57 | +</vector> |
| 1 | /* | 1 | /* |
| 2 | - * Copyright 2023 LiveKit, Inc. | 2 | + * Copyright 2023-2024 LiveKit, Inc. |
| 3 | * | 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. | 5 | * you may not use this file except in compliance with the License. |
| @@ -28,22 +28,55 @@ import androidx.appcompat.app.AlertDialog | @@ -28,22 +28,55 @@ import androidx.appcompat.app.AlertDialog | ||
| 28 | import androidx.appcompat.app.AppCompatActivity | 28 | import androidx.appcompat.app.AppCompatActivity |
| 29 | import androidx.lifecycle.lifecycleScope | 29 | import androidx.lifecycle.lifecycleScope |
| 30 | import androidx.recyclerview.widget.LinearLayoutManager | 30 | import androidx.recyclerview.widget.LinearLayoutManager |
| 31 | +import com.github.ajalt.timberkt.Timber | ||
| 31 | import com.xwray.groupie.GroupieAdapter | 32 | import com.xwray.groupie.GroupieAdapter |
| 33 | +import io.livekit.android.audio.AudioProcessorInterface | ||
| 34 | +import io.livekit.android.audio.AudioProcessorOptions | ||
| 32 | import io.livekit.android.sample.common.R | 35 | import io.livekit.android.sample.common.R |
| 33 | import io.livekit.android.sample.databinding.CallActivityBinding | 36 | import io.livekit.android.sample.databinding.CallActivityBinding |
| 37 | +import io.livekit.android.sample.dialog.showAudioProcessorSwitchDialog | ||
| 34 | import io.livekit.android.sample.dialog.showDebugMenuDialog | 38 | import io.livekit.android.sample.dialog.showDebugMenuDialog |
| 35 | import io.livekit.android.sample.dialog.showSelectAudioDeviceDialog | 39 | import io.livekit.android.sample.dialog.showSelectAudioDeviceDialog |
| 36 | import kotlinx.coroutines.flow.collectLatest | 40 | import kotlinx.coroutines.flow.collectLatest |
| 37 | import kotlinx.parcelize.Parcelize | 41 | import kotlinx.parcelize.Parcelize |
| 42 | +import java.nio.ByteBuffer | ||
| 38 | 43 | ||
| 39 | class CallActivity : AppCompatActivity() { | 44 | class CallActivity : AppCompatActivity() { |
| 40 | 45 | ||
| 41 | - val viewModel: CallViewModel by viewModelByFactory { | 46 | + private val viewModel: CallViewModel by viewModelByFactory { |
| 42 | val args = intent.getParcelableExtra<BundleArgs>(KEY_ARGS) | 47 | val args = intent.getParcelableExtra<BundleArgs>(KEY_ARGS) |
| 43 | ?: throw NullPointerException("args is null!") | 48 | ?: throw NullPointerException("args is null!") |
| 44 | - CallViewModel(args.url, args.token, application, args.e2ee, args.e2eeKey) | 49 | + |
| 50 | + val audioProcessor = object : AudioProcessorInterface { | ||
| 51 | + override fun isEnabled(): Boolean { | ||
| 52 | + Timber.d { "${getName()} isEnabled" } | ||
| 53 | + return true | ||
| 54 | + } | ||
| 55 | + | ||
| 56 | + override fun getName(): String { | ||
| 57 | + return "fake_noise_cancellation" | ||
| 58 | + } | ||
| 59 | + | ||
| 60 | + override fun initializeAudioProcessing(sampleRateHz: Int, numChannels: Int) { | ||
| 61 | + Timber.d { "${getName()} initialize" } | ||
| 62 | + } | ||
| 63 | + | ||
| 64 | + override fun resetAudioProcessing(newRate: Int) { | ||
| 65 | + Timber.d { "${getName()} reset" } | ||
| 66 | + } | ||
| 67 | + | ||
| 68 | + override fun processAudio(numBands: Int, numFrames: Int, buffer: ByteBuffer) { | ||
| 69 | + Timber.d { "${getName()} process" } | ||
| 70 | + } | ||
| 71 | + } | ||
| 72 | + | ||
| 73 | + val audioProcessorOptions = AudioProcessorOptions( | ||
| 74 | + capturePostProcessor = audioProcessor, | ||
| 75 | + ) | ||
| 76 | + | ||
| 77 | + CallViewModel(args.url, args.token, application, args.e2ee, args.e2eeKey, audioProcessorOptions) | ||
| 45 | } | 78 | } |
| 46 | - lateinit var binding: CallActivityBinding | 79 | + private lateinit var binding: CallActivityBinding |
| 47 | private val screenCaptureIntentLauncher = | 80 | private val screenCaptureIntentLauncher = |
| 48 | registerForActivityResult( | 81 | registerForActivityResult( |
| 49 | ActivityResultContracts.StartActivityForResult(), | 82 | ActivityResultContracts.StartActivityForResult(), |
| @@ -146,6 +179,18 @@ class CallActivity : AppCompatActivity() { | @@ -146,6 +179,18 @@ class CallActivity : AppCompatActivity() { | ||
| 146 | .show() | 179 | .show() |
| 147 | } | 180 | } |
| 148 | 181 | ||
| 182 | + viewModel.enhancedNsEnabled.observe(this) { enabled -> | ||
| 183 | + binding.enhancedNs.visibility = if (enabled) { | ||
| 184 | + android.view.View.VISIBLE | ||
| 185 | + } else { | ||
| 186 | + android.view.View.GONE | ||
| 187 | + } | ||
| 188 | + } | ||
| 189 | + | ||
| 190 | + binding.enhancedNs.setOnClickListener { | ||
| 191 | + showAudioProcessorSwitchDialog(viewModel) | ||
| 192 | + } | ||
| 193 | + | ||
| 149 | binding.exit.setOnClickListener { finish() } | 194 | binding.exit.setOnClickListener { finish() } |
| 150 | 195 | ||
| 151 | // Controls row 2 | 196 | // Controls row 2 |
| 1 | +/* | ||
| 2 | + * Copyright 2023-2024 LiveKit, Inc. | ||
| 3 | + * | ||
| 4 | + * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | + * you may not use this file except in compliance with the License. | ||
| 6 | + * You may obtain a copy of the License at | ||
| 7 | + * | ||
| 8 | + * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | + * | ||
| 10 | + * Unless required by applicable law or agreed to in writing, software | ||
| 11 | + * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | + * See the License for the specific language governing permissions and | ||
| 14 | + * limitations under the License. | ||
| 15 | + */ | ||
| 16 | + | ||
| 17 | +package io.livekit.android.sample.dialog | ||
| 18 | + | ||
| 19 | +import android.R | ||
| 20 | +import android.app.Activity | ||
| 21 | +import android.app.AlertDialog | ||
| 22 | +import android.widget.ArrayAdapter | ||
| 23 | +import io.livekit.android.sample.CallViewModel | ||
| 24 | + | ||
| 25 | +fun Activity.showAudioProcessorSwitchDialog(callViewModel: CallViewModel) { | ||
| 26 | + var name = callViewModel.audioProcessorOptions?.capturePostProcessor?.getName() | ||
| 27 | + var enabled = if (callViewModel.enableAudioProcessor.value == true) "On" else "Off" | ||
| 28 | + val builder = with(AlertDialog.Builder(this)) { | ||
| 29 | + setTitle("AudioProcessor for mic: \n[$name] is $enabled") | ||
| 30 | + | ||
| 31 | + val arrayAdapter = ArrayAdapter<String>(this@showAudioProcessorSwitchDialog, R.layout.select_dialog_item) | ||
| 32 | + arrayAdapter.add("On") | ||
| 33 | + arrayAdapter.add("Off") | ||
| 34 | + setAdapter(arrayAdapter) { dialog, index -> | ||
| 35 | + when (index) { | ||
| 36 | + 0 -> callViewModel.toggleEnhancedNs(true) | ||
| 37 | + 1 -> callViewModel.toggleEnhancedNs(false) | ||
| 38 | + } | ||
| 39 | + dialog.dismiss() | ||
| 40 | + } | ||
| 41 | + } | ||
| 42 | + builder.show() | ||
| 43 | +} |
| @@ -81,6 +81,17 @@ | @@ -81,6 +81,17 @@ | ||
| 81 | app:tint="@android:color/white" /> | 81 | app:tint="@android:color/white" /> |
| 82 | 82 | ||
| 83 | <ImageView | 83 | <ImageView |
| 84 | + android:id="@+id/enhanced_ns" | ||
| 85 | + android:visibility="gone" | ||
| 86 | + android:layout_width="@dimen/control_size" | ||
| 87 | + android:layout_height="@dimen/control_size" | ||
| 88 | + android:layout_weight="1" | ||
| 89 | + android:background="?android:attr/selectableItemBackground" | ||
| 90 | + android:padding="@dimen/control_padding" | ||
| 91 | + android:src="@drawable/voice_wave_24" | ||
| 92 | + app:tint="@android:color/white" /> | ||
| 93 | + | ||
| 94 | + <ImageView | ||
| 84 | android:id="@+id/exit" | 95 | android:id="@+id/exit" |
| 85 | android:layout_width="@dimen/control_size" | 96 | android:layout_width="@dimen/control_size" |
| 86 | android:layout_height="@dimen/control_size" | 97 | android:layout_height="@dimen/control_size" |