Committed by GitHub
Custom webrtc and simulcast (#11)
* move to custom webrtc 92.4515.01
* simulcast video encoder factory with wrapper
* set video encodings and simulcast on transceiver
* rtpparameter converter method
* add in comment about dimensions
* update gradle
Showing 11 changed files with 411 additions and 29 deletions.
@@ -26,5 +26,10 @@
       <option name="name" value="MavenRepo" />
       <option name="url" value="https://repo.maven.apache.org/maven2/" />
     </remote-repository>
+    <remote-repository>
+      <option name="id" value="maven" />
+      <option name="name" value="maven" />
+      <option name="url" value="https://jitpack.io" />
+    </remote-repository>
   </component>
 </project>
@@ -11,10 +11,9 @@ buildscript {
         google()
         mavenCentral()
         jcenter()
-
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:7.0.2'
+        classpath 'com.android.tools.build:gradle:7.0.3'
         classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
         classpath "org.jetbrains.kotlin:kotlin-serialization:$kotlin_version"
         classpath "org.jetbrains.dokka:dokka-gradle-plugin:$dokka_version"
@@ -31,6 +30,7 @@ subprojects {
         google()
         mavenCentral()
         jcenter()
+        maven { url 'https://jitpack.io' }
     }
 }
@@ -99,7 +99,7 @@ dependencies {
     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
     implementation deps.kotlinx_coroutines
     implementation 'org.jetbrains.kotlinx:kotlinx-serialization-json:1.1.0'
-    api 'org.webrtc:google-webrtc:1.0.32006'
+    api 'com.github.webrtc-sdk:android:92.4515.01'
     api "com.squareup.okhttp3:okhttp:4.9.0"
     implementation "com.google.protobuf:protobuf-java:${versions.protobuf}"
     implementation "com.google.protobuf:protobuf-java-util:${versions.protobuf}"
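Taken together, the Gradle changes swap the stock `org.webrtc:google-webrtc` artifact for the custom `com.github.webrtc-sdk:android:92.4515.01` build resolved from JitPack. A rough sketch of the equivalent consumer-side setup, written in Gradle Kotlin DSL for consistency with the other examples here (the project's own scripts are Groovy; this block is illustrative only, not part of the diff):

```kotlin
// Illustrative Kotlin DSL equivalent of the Groovy build.gradle changes above.
repositories {
    google()
    mavenCentral()
    // JitPack hosts the custom webrtc-sdk build.
    maven { url = uri("https://jitpack.io") }
}

dependencies {
    // Replaces: api("org.webrtc:google-webrtc:1.0.32006")
    api("com.github.webrtc-sdk:android:92.4515.01")
}
```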
@@ -4,6 +4,7 @@ import android.content.Context
 import dagger.Module
 import dagger.Provides
 import io.livekit.android.util.LKLog
+import io.livekit.android.webrtc.SimulcastVideoEncoderFactoryWrapper
 import org.webrtc.*
 import org.webrtc.audio.AudioDeviceModule
 import org.webrtc.audio.JavaAudioDeviceModule
@@ -103,10 +104,10 @@ class RTCModule {
     ): VideoEncoderFactory {

         return if (videoHwAccel) {
-            DefaultVideoEncoderFactory(
+            SimulcastVideoEncoderFactoryWrapper(
                 eglContext,
-                true,
-                true
+                enableIntelVp8Encoder = true,
+                enableH264HighProfile = true,
             )
         } else {
             SoftwareVideoEncoderFactory()
@@ -149,6 +150,6 @@ class RTCModule {

     @Provides
     @Named(InjectionNames.OPTIONS_VIDEO_HW_ACCEL)
-    fun videoHwAccel() = false
+    fun videoHwAccel() = true
     }
 }
@@ -72,4 +72,7 @@ class PublisherTransportObserver(
     override fun onAddTrack(p0: RtpReceiver?, p1: Array<out MediaStream>?) {
     }

+    override fun onRemoveTrack(p0: RtpReceiver?) {
+    }
+
 }
@@ -25,6 +25,9 @@ class SubscriberTransportObserver(
         engine.listener?.onAddTrack(track, streams)
     }

+    override fun onRemoveTrack(p0: RtpReceiver?) {
+    }
+
     override fun onTrack(transceiver: RtpTransceiver) {
         when (transceiver.mediaType) {
             MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO -> LKLog.v { "peerconn started receiving audio" }
 package io.livekit.android.room.participant

 import android.content.Context
+import android.media.MediaCodecInfo
 import com.google.protobuf.ByteString
 import dagger.assisted.Assisted
 import dagger.assisted.AssistedFactory
@@ -9,9 +10,8 @@ import io.livekit.android.room.RTCEngine
 import io.livekit.android.room.track.*
 import io.livekit.android.util.LKLog
 import livekit.LivekitModels
-import livekit.LivekitRtc
 import org.webrtc.*
-import java.nio.ByteBuffer
+import kotlin.math.abs

 class LocalParticipant
 @AssistedInject
@@ -93,6 +93,7 @@ internal constructor(

     suspend fun publishVideoTrack(
         track: LocalVideoTrack,
+        options: VideoTrackPublishOptions = VideoTrackPublishOptions(),
         publishListener: PublishListener? = null
     ) {
         if (localTrackPublications.any { it.track == track }) {
@@ -101,13 +102,18 @@ internal constructor(
         }

         val cid = track.rtcTrack.id()
-        val trackInfo =
-            engine.addTrack(cid = cid, name = track.name, kind = LivekitModels.TrackType.VIDEO, dimensions = track.dimensions)
+        val trackInfo = engine.addTrack(
+            cid = cid,
+            name = track.name,
+            kind = LivekitModels.TrackType.VIDEO,
+            dimensions = track.dimensions
+        )
+        val encodings = computeVideoEncodings(track.dimensions, options)
         val transInit = RtpTransceiver.RtpTransceiverInit(
             RtpTransceiver.RtpTransceiverDirection.SEND_ONLY,
-            listOf(this.sid)
+            listOf(this.sid),
+            encodings
         )
-        // TODO: video encodings & simulcast
         val transceiver = engine.publisher.peerConnection.addTransceiver(track.rtcTrack, transInit)
         track.transceiver = transceiver

@@ -116,11 +122,69 @@ internal constructor(
             return
         }

+        // TODO: enable setting preferred codec
+
         val publication = LocalTrackPublication(trackInfo, track, this)
         addTrackPublication(publication)
         publishListener?.onPublishSuccess(publication)
     }

+    private fun computeVideoEncodings(
+        dimensions: Track.Dimensions,
+        options: VideoTrackPublishOptions
+    ): List<RtpParameters.Encoding> {
+        val (width, height) = dimensions
+        var encoding = options.videoEncoding
+        val simulcast = options.simulcast
+
+        if ((encoding == null && !simulcast) || width == 0 || height == 0) {
+            return emptyList()
+        }
+
+        if (encoding == null) {
+            encoding = determineAppropriateEncoding(width, height)
+            LKLog.d { "using video encoding: $encoding" }
+        }
+
+        val encodings = mutableListOf<RtpParameters.Encoding>()
+        if (simulcast) {
+            encodings.add(encoding.toRtpEncoding("f"))
+
+            val presets = presetsForResolution(width, height)
+            val midPreset = presets[1]
+            val lowPreset = presets[0]
+
+            // if resolution is high enough, we send both h and q res.
+            // otherwise only send h
+            if (width >= 960) {
+                encodings.add(midPreset.encoding.toRtpEncoding("h", 2.0))
+                encodings.add(lowPreset.encoding.toRtpEncoding("q", 4.0))
+            } else {
+                encodings.add(lowPreset.encoding.toRtpEncoding("h", 2.0))
+            }
+        } else {
+            encodings.add(encoding.toRtpEncoding())
+        }
+        return encodings
+    }
+
+    private fun determineAppropriateEncoding(width: Int, height: Int): VideoEncoding {
+        val presets = presetsForResolution(width, height)
+
+        return presets
+            .last { width >= it.capture.width && height >= it.capture.height }
+            .encoding
+    }
+
+    private fun presetsForResolution(width: Int, height: Int): List<VideoPreset> {
+        val aspectRatio = width.toFloat() / height
+        if (abs(aspectRatio - 16f / 9f) < abs(aspectRatio - 4f / 3f)) {
+            return PRESETS_16_9
+        } else {
+            return PRESETS_4_3
+        }
+    }
+
     fun unpublishTrack(track: Track) {
         val publication = localTrackPublications.firstOrNull { it.track == track }
         if (publication === null) {
@@ -202,4 +266,38 @@ internal constructor(
     interface Factory {
         fun create(info: LivekitModels.ParticipantInfo): LocalParticipant
     }
+
+    companion object {
+        private val PRESETS_16_9 = listOf(
+            VideoPreset169.QVGA,
+            VideoPreset169.VGA,
+            VideoPreset169.QHD,
+            VideoPreset169.HD,
+            VideoPreset169.FHD
+        )
+
+        private val PRESETS_4_3 = listOf(
+            VideoPreset43.QVGA,
+            VideoPreset43.VGA,
+            VideoPreset43.QHD,
+            VideoPreset43.HD,
+            VideoPreset43.FHD
+        )
+    }
 }
+
+interface TrackPublishOptions {
+    val name: String?
+}
+
+data class VideoTrackPublishOptions(
+    override val name: String? = null,
+    val videoEncoding: VideoEncoding? = null,
+    //val videoCodec: VideoCodec? = null,
+    val simulcast: Boolean = false
+) : TrackPublishOptions
+
+data class AudioTrackPublishOptions(
+    override val name: String? = null,
+    val audioBitrate: Int? = null,
+) : TrackPublishOptions
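With the new `options` parameter, callers opt into simulcast at publish time. A minimal sketch of how this might be invoked, assuming a `LocalParticipant` and a `LocalVideoTrack` have already been obtained from an existing connection (the helper name and surrounding scope are illustrative, not part of this diff):

```kotlin
import io.livekit.android.room.participant.LocalParticipant
import io.livekit.android.room.participant.VideoTrackPublishOptions
import io.livekit.android.room.track.LocalVideoTrack
import io.livekit.android.room.track.VideoEncoding

// Hypothetical helper: `participant` and `videoTrack` are assumed to come from an
// existing Room connection and camera capture; only the options usage is from this PR.
suspend fun publishWithSimulcast(participant: LocalParticipant, videoTrack: LocalVideoTrack) {
    // Omitting videoEncoding lets computeVideoEncodings() pick a preset
    // based on the track's capture dimensions.
    participant.publishVideoTrack(videoTrack, VideoTrackPublishOptions(simulcast = true))

    // Alternatively, pin the top layer's encoding explicitly; the lower
    // simulcast layers are still derived from the resolution presets:
    // participant.publishVideoTrack(
    //     videoTrack,
    //     VideoTrackPublishOptions(videoEncoding = VideoEncoding(2_500_000, 30), simulcast = true)
    // )
}
```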
@@ -24,8 +24,12 @@ class LocalVideoTrack(
     override var rtcTrack: org.webrtc.VideoTrack = rtcTrack
         internal set

-    val dimensions: Dimensions =
-        Dimensions(options.captureParams.width, options.captureParams.height)
+    /**
+     * Note: these dimensions are only requested params, and may differ
+     * from the actual capture format used by the camera.
+     */
+    val dimensions: Dimensions
+        get() = Dimensions(options.captureParams.width, options.captureParams.height)

     internal var transceiver: RtpTransceiver? = null
     private val sender: RtpSender?
 package io.livekit.android.room.track

+import org.webrtc.RtpParameters
+
 class LocalVideoTrackOptions(
     var isScreencast: Boolean = false,
     var position: CameraPosition = CameraPosition.FRONT,
-    var captureParams: VideoCaptureParameter = VideoPreset.QHD.capture
+    var captureParams: VideoCaptureParameter = VideoPreset169.QHD.capture
 )

-class VideoCaptureParameter(
+data class VideoCaptureParameter(
     val width: Int,
     val height: Int,
     val maxFps: Int,
 )

-class VideoEncoding(
+data class VideoEncoding(
     val maxBitrate: Int,
     val maxFps: Int,
-)
+) {
+    fun toRtpEncoding(
+        rid: String? = null,
+        scaleDownBy: Double = 1.0,
+    ): RtpParameters.Encoding {
+        return RtpParameters.Encoding(rid, true, scaleDownBy).apply {
+            numTemporalLayers = 1
+            maxBitrateBps = maxBitrate
+            maxFramerate = maxFps
+
+            // only set on the full track
+            if (scaleDownBy == 1.0) {
+                networkPriority = 3 // high, from priority.h in webrtc
+                bitratePriority = 4.0
+            } else {
+                networkPriority = 1 // low, from priority.h in webrtc
+                bitratePriority = 1.0
+            }
+
+        }
+    }
+}
+
+enum class VideoCodec(val codecName: String) {
+    VP8("vp8"),
+    H264("h264"),
+}

 enum class CameraPosition {
     FRONT,
     BACK
 }

+interface VideoPreset {
+    val capture: VideoCaptureParameter
+    val encoding: VideoEncoding
+}
+
 /**
- * Video presets along with suggested bitrates
+ * 16:9 Video presets along with suggested bitrates
  */
-enum class VideoPreset(
-    val capture: VideoCaptureParameter,
-    val encoding: VideoEncoding,
-) {
+enum class VideoPreset169(
+    override val capture: VideoCaptureParameter,
+    override val encoding: VideoEncoding,
+) : VideoPreset {
     QVGA(
-        VideoCaptureParameter(320, 240, 15),
-        VideoEncoding(100_000, 15),
+        VideoCaptureParameter(320, 180, 15),
+        VideoEncoding(125_000, 15),
     ),
     VGA(
         VideoCaptureParameter(640, 360, 30),
@@ -39,15 +72,43 @@ enum class VideoPreset(
     ),
     QHD(
         VideoCaptureParameter(960, 540, 30),
-        VideoEncoding(700_000, 30),
+        VideoEncoding(800_000, 30),
     ),
     HD(
         VideoCaptureParameter(1280, 720, 30),
-        VideoEncoding(2_000_000, 30),
+        VideoEncoding(2_500_000, 30),
     ),
     FHD(
         VideoCaptureParameter(1920, 1080, 30),
         VideoEncoding(4_000_000, 30),
     )
+}

+/**
+ * 4:3 Video presets along with suggested bitrates
+ */
+enum class VideoPreset43(
+    override val capture: VideoCaptureParameter,
+    override val encoding: VideoEncoding,
+) : VideoPreset {
+    QVGA(
+        VideoCaptureParameter(240, 180, 15),
+        VideoEncoding(100_000, 15),
+    ),
+    VGA(
+        VideoCaptureParameter(480, 360, 30),
+        VideoEncoding(320_000, 30),
+    ),
+    QHD(
+        VideoCaptureParameter(720, 540, 30),
+        VideoEncoding(640_000, 30),
+    ),
+    HD(
+        VideoCaptureParameter(960, 720, 30),
+        VideoEncoding(2_000_000, 30),
+    ),
+    FHD(
+        VideoCaptureParameter(1440, 1080, 30),
+        VideoEncoding(3_200_000, 30),
+    )
 }
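As a concrete illustration of what the RtpParameters converter produces, the sketch below assembles the three simulcast layers that `computeVideoEncodings` would build for a 1280x720 (16:9) capture: the `f` layer carries the full HD encoding, while `h` and `q` reuse the lower presets scaled down by 2x and 4x. The numbers come straight from the presets above; the helper itself is illustrative and not part of the SDK.

```kotlin
import io.livekit.android.room.track.VideoPreset169
import org.webrtc.RtpParameters

// Mirrors the simulcast branch of computeVideoEncodings() for a 1280x720 track.
fun simulcastLayersFor720p(): List<RtpParameters.Encoding> {
    val full = VideoPreset169.HD.encoding   // 2_500_000 bps @ 30 fps, sent at full size
    val mid = VideoPreset169.VGA.encoding   // "h" layer, scaled down by 2 (to 640x360)
    val low = VideoPreset169.QVGA.encoding  // "q" layer, scaled down by 4 (to 320x180)

    return listOf(
        full.toRtpEncoding("f"),
        mid.toRtpEncoding("h", 2.0),
        low.toRtpEncoding("q", 4.0),
    )
}
```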
@@ -44,7 +44,7 @@ open class Track(
         }
     }

-    class Dimensions(var width: Int, var height: Int)
+    data class Dimensions(var width: Int, var height: Int)

     open fun stop() {
         rtcTrack.setEnabled(false)
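Making `Dimensions` a data class generates `equals`/`hashCode` and `componentN` functions, which is what lets `computeVideoEncodings` destructure it. A trivial sketch (values arbitrary):

```kotlin
import io.livekit.android.room.track.Track

fun dimensionsExample() {
    // data class generates component1()/component2(), so destructuring works:
    val (width, height) = Track.Dimensions(1280, 720)
    println("$width x $height")
}
```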
livekit-android-sdk/src/main/java/io/livekit/android/webrtc/SimulcastVideoEncoderFactoryWrapper.kt (new file, mode 100644)
package io.livekit.android.webrtc

import io.livekit.android.util.LKLog
import org.webrtc.*
import java.util.concurrent.*

/*
Copyright 2017, Lyo Kato <lyo.kato at gmail.com> (Original Author)
Copyright 2017-2021, Shiguredo Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
 */
internal class SimulcastVideoEncoderFactoryWrapper(
    sharedContext: EglBase.Context?,
    enableIntelVp8Encoder: Boolean,
    enableH264HighProfile: Boolean
) : VideoEncoderFactory {

    /**
     * Factory that prioritizes software encoder.
     *
     * When the selected codec can't be handled by the software encoder,
     * it uses the hardware encoder as a fallback. However, this class is
     * primarily used to address an issue in libwebrtc, and does not have
     * purposeful usecase itself.
     *
     * To use simulcast in libwebrtc, SimulcastEncoderAdapter is used.
     * SimulcastEncoderAdapter takes in a primary and fallback encoder.
     * If HardwareVideoEncoderFactory and SoftwareVideoEncoderFactory are
     * passed in directly as primary and fallback, when H.264 is used,
     * libwebrtc will crash.
     *
     * This is because SoftwareVideoEncoderFactory does not handle H.264,
     * so [SoftwareVideoEncoderFactory.createEncoder] returns null, and
     * the libwebrtc side does not handle nulls, regardless of whether the
     * fallback is actually used or not.
     *
     * To avoid nulls, we simply pass responsibility over to the HardwareVideoEncoderFactory.
     * This results in HardwareVideoEncoderFactory being both the primary and fallback,
     * but there aren't any specific problems in doing so.
     */
    private class Fallback(private val hardwareVideoEncoderFactory: VideoEncoderFactory) :
        VideoEncoderFactory {

        private val softwareVideoEncoderFactory: VideoEncoderFactory = SoftwareVideoEncoderFactory()

        override fun createEncoder(info: VideoCodecInfo): VideoEncoder? {
            val softwareEncoder = softwareVideoEncoderFactory.createEncoder(info)
            val hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info)
            return if (hardwareEncoder != null && softwareEncoder != null) {
                VideoEncoderFallback(hardwareEncoder, softwareEncoder)
            } else {
                softwareEncoder ?: hardwareEncoder
            }
        }

        override fun getSupportedCodecs(): Array<VideoCodecInfo> {
            val supportedCodecInfos: MutableList<VideoCodecInfo> = mutableListOf()
            supportedCodecInfos.addAll(softwareVideoEncoderFactory.supportedCodecs)
            supportedCodecInfos.addAll(hardwareVideoEncoderFactory.supportedCodecs)
            return supportedCodecInfos.toTypedArray()
        }

    }

    /**
     * Wraps each stream encoder and performs the following:
     * - Starts up a single thread
     * - When the width/height from [initEncode] doesn't match the frame buffer's,
     *   scales the frame prior to encoding.
     * - Always calls the encoder on the thread.
     */
    private class StreamEncoderWrapper(private val encoder: VideoEncoder) : VideoEncoder {

        val executor: ExecutorService = Executors.newSingleThreadExecutor()
        var streamSettings: VideoEncoder.Settings? = null

        override fun initEncode(
            settings: VideoEncoder.Settings,
            callback: VideoEncoder.Callback?
        ): VideoCodecStatus {
            streamSettings = settings
            val future = executor.submit(Callable {
                LKLog.i {
                    """initEncode() thread=${Thread.currentThread().name} [${Thread.currentThread().id}]
                    | streamSettings:
                    |   numberOfCores=${settings.numberOfCores}
                    |   width=${settings.width}
                    |   height=${settings.height}
                    |   startBitrate=${settings.startBitrate}
                    |   maxFramerate=${settings.maxFramerate}
                    |   automaticResizeOn=${settings.automaticResizeOn}
                    |   numberOfSimulcastStreams=${settings.numberOfSimulcastStreams}
                    |   lossNotification=${settings.capabilities.lossNotification}
                    """.trimMargin()
                }
                return@Callable encoder.initEncode(settings, callback)
            })
            return future.get()
        }

        override fun release(): VideoCodecStatus {
            val future = executor.submit(Callable { return@Callable encoder.release() })
            return future.get()
        }

        override fun encode(
            frame: VideoFrame,
            encodeInfo: VideoEncoder.EncodeInfo?
        ): VideoCodecStatus {
            val future = executor.submit(Callable {
                //LKLog.d { "encode() buffer=${frame.buffer}, thread=${Thread.currentThread().name} " +
                //    "[${Thread.currentThread().id}]" }
                if (streamSettings == null) {
                    return@Callable encoder.encode(frame, encodeInfo)
                } else if (frame.buffer.width == streamSettings!!.width) {
                    return@Callable encoder.encode(frame, encodeInfo)
                } else {
                    // The incoming buffer is different than the streamSettings received in initEncode()
                    // Need to scale.
                    val originalBuffer = frame.buffer
                    // TODO: Do we need to handle when the scale factor is weird?
                    val adaptedBuffer = originalBuffer.cropAndScale(
                        0, 0, originalBuffer.width, originalBuffer.height,
                        streamSettings!!.width, streamSettings!!.height
                    )
                    val adaptedFrame = VideoFrame(adaptedBuffer, frame.rotation, frame.timestampNs)
                    val result = encoder.encode(adaptedFrame, encodeInfo)
                    adaptedBuffer.release()
                    return@Callable result
                }
            })
            return future.get()
        }

        override fun setRateAllocation(
            allocation: VideoEncoder.BitrateAllocation?,
            frameRate: Int
        ): VideoCodecStatus {
            val future = executor.submit(Callable {
                return@Callable encoder.setRateAllocation(
                    allocation,
                    frameRate
                )
            })
            return future.get()
        }

        override fun getScalingSettings(): VideoEncoder.ScalingSettings {
            val future = executor.submit(Callable { return@Callable encoder.scalingSettings })
            return future.get()
        }

        override fun getImplementationName(): String {
            val future = executor.submit(Callable { return@Callable encoder.implementationName })
            return future.get()
        }
    }

    private class StreamEncoderWrapperFactory(private val factory: VideoEncoderFactory) :
        VideoEncoderFactory {
        override fun createEncoder(videoCodecInfo: VideoCodecInfo?): VideoEncoder? {
            val encoder = factory.createEncoder(videoCodecInfo)
            if (encoder == null) {
                return null
            }
            return StreamEncoderWrapper(encoder)
        }

        override fun getSupportedCodecs(): Array<VideoCodecInfo> {
            return factory.supportedCodecs
        }
    }


    private val primary: VideoEncoderFactory
    private val fallback: VideoEncoderFactory
    private val native: SimulcastVideoEncoderFactory

    init {
        val hardwareVideoEncoderFactory = HardwareVideoEncoderFactory(
            sharedContext, enableIntelVp8Encoder, enableH264HighProfile
        )
        primary = StreamEncoderWrapperFactory(hardwareVideoEncoderFactory)
        fallback = StreamEncoderWrapperFactory(Fallback(primary))
        native = SimulcastVideoEncoderFactory(primary, fallback)
    }

    override fun createEncoder(info: VideoCodecInfo?): VideoEncoder? {
        return native.createEncoder(info)
    }

    override fun getSupportedCodecs(): Array<VideoCodecInfo> {
        return native.supportedCodecs
    }

}
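For reference, the wrapper slots into PeerConnection setup exactly where `DefaultVideoEncoderFactory` used to, as the RTCModule change above shows. A minimal sketch of equivalent manual wiring, only meaningful inside the SDK module since the wrapper is `internal`; the EGL and decoder setup here is the standard WebRTC Android pattern rather than code from this diff:

```kotlin
import io.livekit.android.webrtc.SimulcastVideoEncoderFactoryWrapper
import org.webrtc.DefaultVideoDecoderFactory
import org.webrtc.EglBase
import org.webrtc.PeerConnectionFactory

// Assumes PeerConnectionFactory.initialize(...) has already been called with an
// application context, as the SDK does elsewhere.
fun buildFactory(eglBase: EglBase): PeerConnectionFactory {
    val encoderFactory = SimulcastVideoEncoderFactoryWrapper(
        eglBase.eglBaseContext,
        enableIntelVp8Encoder = true,
        enableH264HighProfile = true,
    )
    val decoderFactory = DefaultVideoDecoderFactory(eglBase.eglBaseContext)

    return PeerConnectionFactory.builder()
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory()
}
```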