Committed by GitHub

Add simulcastLayers to VideoTrackPublishOptions for directly specifying the resolutions to use (#746)

Also includes fixes for choosing default encodings.

Showing 8 changed files with 372 additions and 30 deletions.
New files:
.changeset/pink-pans-glow.md (0 → 100644)
.changeset/slimy-queens-serve.md (0 → 100644)
.changeset/wicked-needles-call.md (0 → 100644)
@@ -6,4 +6,11 @@
       <module name="livekit-android.examples.selfie-segmentation" target="17" />
     </bytecodeTargetLevel>
   </component>
+  <component name="JavacSettings">
+    <option name="ADDITIONAL_OPTIONS_OVERRIDE">
+      <module name="livekit-android.livekit-lint" options="-proc:none" />
+      <module name="livekit-android.livekit-lint.main" options="-proc:none" />
+      <module name="livekit-android.livekit-lint.test" options="-proc:none" />
+    </option>
+  </component>
 </project>
@@ -51,6 +51,7 @@ import io.livekit.android.room.track.TrackPublication
 import io.livekit.android.room.track.VideoCaptureParameter
 import io.livekit.android.room.track.VideoCodec
 import io.livekit.android.room.track.VideoEncoding
+import io.livekit.android.room.track.VideoPreset
 import io.livekit.android.room.track.screencapture.ScreenCaptureParams
 import io.livekit.android.room.util.EncodingUtils
 import io.livekit.android.rpc.RpcError

@@ -480,7 +481,10 @@ internal constructor(
      */
     suspend fun publishVideoTrack(
         track: LocalVideoTrack,
-        options: VideoTrackPublishOptions = VideoTrackPublishOptions(null, videoTrackPublishDefaults),
+        options: VideoTrackPublishOptions = VideoTrackPublishOptions(
+            null,
+            if (track.options.isScreencast) screenShareTrackPublishDefaults else videoTrackPublishDefaults,
+        ),
         publishListener: PublishListener? = null,
     ): Boolean {
         @Suppress("NAME_SHADOWING") var options = options

@@ -514,7 +518,7 @@
                 options = options.copy(scalabilityMode = "L3T3_KEY")
             }
         }
-        val encodings = computeVideoEncodings(track.dimensions, options)
+        val encodings = computeVideoEncodings(track.options.isScreencast, track.dimensions, options)
         val videoLayers =
             EncodingUtils.videoLayersFromEncodings(track.dimensions.width, track.dimensions.height, encodings, isSVC)

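Note: with the change above, a screen-capture track published without explicit options now picks up the screen-share publish defaults instead of the camera defaults. A minimal usage sketch (the createScreencastTrack helper and the media-projection intent are assumed from the existing SDK surface, not part of this diff):

// Sketch: publish a screen capture and rely on the new screencast-aware defaults.
// `mediaProjectionData` is assumed to be the Intent returned by the MediaProjection permission dialog.
val screencastTrack = room.localParticipant.createScreencastTrack(
    mediaProjectionPermissionResultData = mediaProjectionData,
)
// No options argument: because track.options.isScreencast is true, the default
// VideoTrackPublishOptions is now built from screenShareTrackPublishDefaults.
room.localParticipant.publishVideoTrack(screencastTrack)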
@@ -722,7 +726,10 @@ internal constructor(
                     options = options.copy(videoCodec = updatedCodec)

                     // recompute encodings since bitrates/etc could have changed
-                    encodings = computeVideoEncodings((track as LocalVideoTrack).dimensions, options)
+                    val videoTrack = track as LocalVideoTrack
+
+                    encodings = computeVideoEncodings(videoTrack.options.isScreencast, videoTrack.dimensions, options)
+                    encodings // encodings is used in negotiate, this suppresses unused lint
                 }
             }
         }

@@ -751,35 +758,32 @@ internal constructor(
     }

     private fun computeVideoEncodings(
+        isScreenShare: Boolean,
         dimensions: Track.Dimensions,
         options: VideoTrackPublishOptions,
     ): List<RtpParameters.Encoding> {
         val (width, height) = dimensions
-        var encoding = options.videoEncoding
+        var originalEncoding = options.videoEncoding
         val simulcast = options.simulcast
         val scalabilityMode = options.scalabilityMode

-        if ((encoding == null && !simulcast) || width == 0 || height == 0) {
+        if ((originalEncoding == null && !simulcast) || width == 0 || height == 0) {
             return emptyList()
         }

-        if (encoding == null) {
-            encoding = EncodingUtils.determineAppropriateEncoding(width, height)
-            LKLog.d { "using video encoding: $encoding" }
+        if (originalEncoding == null) {
+            originalEncoding = EncodingUtils.determineAppropriateEncoding(isScreenShare, width, height)
+            LKLog.d { "using video encoding: $originalEncoding" }
         }

         val encodings = mutableListOf<RtpParameters.Encoding>()

         if (scalabilityMode != null && isSVCCodec(options.videoCodec)) {
-            val rtpEncoding = encoding.toRtpEncoding()
+            val rtpEncoding = originalEncoding.toRtpEncoding()
             rtpEncoding.scalabilityMode = scalabilityMode
             encodings.add(rtpEncoding)
             return encodings
         } else if (simulcast) {
-            val presets = EncodingUtils.presetsForResolution(width, height)
-            val midPreset = presets[1]
-            val lowPreset = presets[0]
-
             fun addEncoding(videoEncoding: VideoEncoding, scale: Double) {
                 if (scale < 1.0) {
                     LKLog.w { "Discarding encoding with a scale < 1.0: $scale." }

@@ -793,27 +797,42 @@ internal constructor(
                 encodings.add(videoEncoding.toRtpEncoding(rid, scale))
             }

+            val presets = options.simulcastLayers
+                ?: EncodingUtils.defaultSimulcastLayers(
+                    isScreenShare = isScreenShare,
+                    width = width,
+                    height = height,
+                    originalEncoding = originalEncoding,
+                )
+            if (presets.isEmpty()) {
+                LKLog.w { "Simulcast is enabled but an empty list was set for simulcastLayers!" }
+            }
+
             // if resolution is high enough, we send both h and q res.
             // otherwise only send h
             val size = max(width, height)
-            val maxFps = encoding.maxFps
+            val maxFps = originalEncoding.maxFps
             fun calculateScaleDown(captureParam: VideoCaptureParameter): Double {
                 val targetSize = max(captureParam.width, captureParam.height)
                 return size / targetSize.toDouble()
            }
-            if (size >= 960) {
-                val lowScale = calculateScaleDown(lowPreset.capture)
-                val midScale = calculateScaleDown(midPreset.capture)

-                addEncoding(lowPreset.encoding.copy(maxFps = min(lowPreset.encoding.maxFps, maxFps)), lowScale)
-                addEncoding(midPreset.encoding.copy(maxFps = min(midPreset.encoding.maxFps, maxFps)), midScale)
-            } else {
+            // Add encodings from smallest to largest.
+            val orderedPresets = presets.sortedByDescending { calculateScaleDown(it.capture) }
+            val lowPreset = orderedPresets.getOrNull(0)
+            val midPreset = orderedPresets.getOrNull(1)
+
+            if (size >= 480 && lowPreset != null) {
                 val lowScale = calculateScaleDown(lowPreset.capture)
                 addEncoding(lowPreset.encoding.copy(maxFps = min(lowPreset.encoding.maxFps, maxFps)), lowScale)
             }
-            addEncoding(encoding, 1.0)
+            if (size >= 960 && midPreset != null) {
+                val midScale = calculateScaleDown(midPreset.capture)
+                addEncoding(midPreset.encoding.copy(maxFps = min(midPreset.encoding.maxFps, maxFps)), midScale)
+            }
+            addEncoding(originalEncoding, 1.0)
         } else {
-            encodings.add(encoding.toRtpEncoding())
+            encodings.add(originalEncoding.toRtpEncoding())
         }

         // Make largest size at front. addTransceiver seems to fail if ordered from smallest to largest.
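To make the new selection logic concrete, here is a worked walk-through for a 1280x720 camera track with simulcast enabled and no explicit simulcastLayers, using the default H180/H360 layers introduced later in this change (capture sizes 320x180 and 640x360 for the 16:9 presets):

// Worked example for width = 1280, height = 720:
//   size = max(1280, 720) = 1280
//   presets -> default layers H180 and H360, sorted by scale-down factor (largest first):
//     H180: scaleDownBy = 1280 / 320 = 4.0  -> lowPreset
//     H360: scaleDownBy = 1280 / 640 = 2.0  -> midPreset
//   size >= 480  -> add the H180 layer at scale 4.0
//   size >= 960  -> add the H360 layer at scale 2.0
//   finally add the original encoding at scale 1.0, for three encodings in total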
@@ -838,7 +857,7 @@ internal constructor(
             videoCodec = videoCodec.codecName,
             videoEncoding = options.backupCodec!!.encoding,
         )
-        val backupEncodings = computeVideoEncodings(track.dimensions, backupOptions)
+        val backupEncodings = computeVideoEncodings(track.options.isScreencast, track.dimensions, backupOptions)
         return backupOptions to backupEncodings
     }

@@ -1713,8 +1732,19 @@ abstract class BaseVideoTrackPublishOptions {
     * null value indicates default value (maintain framerate).
     */
    abstract val degradationPreference: RtpParameters.DegradationPreference?
+
+    /**
+     * Up to two additional simulcast layers to publish in addition to the original
+     * Track. Layers should be ordered from smallest to largest. Layers beyond the
+     * first two will be ignored. Any layers that have larger resolutions than the
+     * source resolution will also be ignored.
+     *
+     * When set to null, it defaults to H180 and H360.
+     */
+    abstract val simulcastLayers: List<VideoPreset>?
 }

+// Remember when adding any defaults to add it in the copy constructor of VideoTrackPublishOptions.
 data class VideoTrackPublishDefaults(
     override val videoEncoding: VideoEncoding? = null,
     override val simulcast: Boolean = true,

@@ -1722,6 +1752,7 @@ data class VideoTrackPublishDefaults(
     override val scalabilityMode: String? = null,
     override val backupCodec: BackupVideoCodec? = null,
     override val degradationPreference: RtpParameters.DegradationPreference? = null,
+    override val simulcastLayers: List<VideoPreset>? = null,
 ) : BaseVideoTrackPublishOptions()

 data class VideoTrackPublishOptions(

@@ -1734,6 +1765,7 @@ data class VideoTrackPublishOptions(
     override val source: Track.Source? = null,
     override val stream: String? = null,
     override val degradationPreference: RtpParameters.DegradationPreference? = null,
+    override val simulcastLayers: List<VideoPreset>? = null,
 ) : BaseVideoTrackPublishOptions(), TrackPublishOptions {
     constructor(
         name: String? = null,

@@ -1750,6 +1782,7 @@ data class VideoTrackPublishOptions(
         source = source,
         stream = stream,
         degradationPreference = base.degradationPreference,
+        simulcastLayers = base.simulcastLayers,
     )

     fun createBackupOptions(): VideoTrackPublishOptions? {
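A minimal usage sketch for the new option, mirroring what the tests added below do (it assumes an already-constructed Room instance named room and uses the SDK's existing VideoPreset169 presets):

// Sketch: request specific simulcast layers for all video tracks published afterwards.
room.videoTrackPublishDefaults = room.videoTrackPublishDefaults.copy(
    simulcastLayers = listOf(
        VideoPreset169.H180, // smallest first, as the documentation above asks
        VideoPreset169.H540,
    ),
)
// Each published track then carries these layers plus the full-resolution original;
// any layer larger than the captured resolution is dropped.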
@@ -1798,6 +1831,7 @@ enum class AudioPresets(
     MUSIC_HIGH_QUALITY_STEREO(128_000)
 }

+// Remember when adding any defaults to add it in the copy constructor of VideoTrackPublishOptions.
 /**
  * Default options for publishing an audio track.
  */

@@ -19,6 +19,9 @@ package io.livekit.android.room.track
 import livekit.org.webrtc.RtpParameters

 data class LocalVideoTrackOptions(
+    /**
+     * Whether this is a screenshare track.
+     */
     val isScreencast: Boolean = false,
     /**
      * Preferred deviceId to capture from. If not set or found,

@@ -110,6 +113,11 @@ interface VideoPreset {
     val encoding: VideoEncoding
 }

+data class CustomVideoPreset(
+    override val capture: VideoCaptureParameter,
+    override val encoding: VideoEncoding,
+) : VideoPreset
+
 /**
  * 16:9 Video presets along with suggested bitrates
  */
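CustomVideoPreset exists so a caller can describe a layer that none of the built-in presets cover. A small illustrative construction (the specific numbers are arbitrary and only show the shape of the API):

// Sketch: a bespoke 960x540 layer capped at 20 fps and roughly 800 kbps.
val customLayer = CustomVideoPreset(
    capture = VideoCaptureParameter(width = 960, height = 540, maxFps = 20),
    encoding = VideoEncoding(maxBitrate = 800_000, maxFps = 20),
)
// It can be passed anywhere a VideoPreset is accepted, for example in simulcastLayers.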
 /*
- * Copyright 2023-2024 LiveKit, Inc.
+ * Copyright 2023-2025 LiveKit, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.

@@ -16,6 +16,9 @@

 package io.livekit.android.room.util

+import io.livekit.android.room.track.CustomVideoPreset
+import io.livekit.android.room.track.ScreenSharePresets
+import io.livekit.android.room.track.VideoCaptureParameter
 import io.livekit.android.room.track.VideoEncoding
 import io.livekit.android.room.track.VideoPreset
 import io.livekit.android.room.track.VideoPreset169

@@ -35,6 +38,15 @@ internal object EncodingUtils {
     val VIDEO_RIDS = arrayOf("q", "h", "f")

     // Note: maintain order from smallest to biggest.
+    private val SCREENSHARE_PRESETS = listOf(
+        ScreenSharePresets.H360_FPS3,
+        ScreenSharePresets.H720_FPS5,
+        ScreenSharePresets.H720_FPS15,
+        ScreenSharePresets.H1080_FPS15,
+        ScreenSharePresets.H1080_FPS30,
+    )
+
+    // Note: maintain order from smallest to biggest.
     private val PRESETS_16_9 = listOf(
         VideoPreset169.H90,
         VideoPreset169.H180,

@@ -60,8 +72,20 @@ internal object EncodingUtils {
         VideoPreset43.H1440,
     )

-    fun determineAppropriateEncoding(width: Int, height: Int): VideoEncoding {
-        val presets = presetsForResolution(width, height)
+    // Note: maintain order from smallest to biggest.
+    private val DEFAULT_SIMULCAST_LAYERS_169 = listOf(
+        VideoPreset169.H180,
+        VideoPreset169.H360,
+    )
+
+    // Note: maintain order from smallest to biggest.
+    private val DEFAULT_SIMULCAST_LAYERS_43 = listOf(
+        VideoPreset43.H180,
+        VideoPreset43.H360,
+    )
+
+    fun determineAppropriateEncoding(isScreenShare: Boolean, width: Int, height: Int): VideoEncoding {
+        val presets = computeSuggestedPresets(isScreenShare, width, height)

         // presets assume width is longest size
         val longestSize = max(width, height)

@@ -72,7 +96,11 @@ internal object EncodingUtils {
         return preset.encoding
     }

-    fun presetsForResolution(width: Int, height: Int): List<VideoPreset> {
+    fun computeSuggestedPresets(isScreenShare: Boolean, width: Int, height: Int): List<VideoPreset> {
+        if (isScreenShare) {
+            return SCREENSHARE_PRESETS
+        }
+
         val longestSize = max(width, height)
         val shortestSize = min(width, height)
         val aspectRatio = longestSize.toFloat() / shortestSize

@@ -83,6 +111,41 @@ internal object EncodingUtils {
         }
     }

+    fun defaultSimulcastLayers(isScreenShare: Boolean, width: Int, height: Int, originalEncoding: VideoEncoding): List<VideoPreset> {
+        if (isScreenShare) {
+            return computeDefaultScreenshareSimulcastLayers(width, height, originalEncoding)
+        }
+        val longestSize = max(width, height)
+        val shortestSize = min(width, height)
+        val aspectRatio = longestSize.toFloat() / shortestSize
+        return if (abs(aspectRatio - 16f / 9f) < abs(aspectRatio - 4f / 3f)) {
+            DEFAULT_SIMULCAST_LAYERS_169
+        } else {
+            DEFAULT_SIMULCAST_LAYERS_43
+        }
+    }
+
+    fun computeDefaultScreenshareSimulcastLayers(width: Int, height: Int, originalEncoding: VideoEncoding): List<VideoPreset> {
+        // pairs of ScaleDownBy to FPS
+        val layers = listOf(2 to 3)
+
+        return layers.map { (scaleDownBy, fps) ->
+            CustomVideoPreset(
+                capture = VideoCaptureParameter(
+                    width = width / scaleDownBy,
+                    height = height / scaleDownBy,
+                    maxFps = fps,
+                    adaptOutputToDimensions = false,
+                ),
+                encoding = VideoEncoding(
+                    maxBitrate = originalEncoding.maxBitrate /
+                        (scaleDownBy.toFloat().pow(2).roundToInt() * (originalEncoding.maxFps / fps)),
+                    maxFps = fps,
+                ),
+            )
+        }
+    }
+
     fun videoLayersFromEncodings(
         trackWidth: Int,
         trackHeight: Int,
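The default screen-share layer is derived from the original encoding rather than from a fixed preset. The arithmetic below spells out the numbers that the publishScreencastDefaultLayers test later asserts; it assumes the original screen-share encoding runs at 30 fps, which is what makes the divisor come out to 40:

// Worked example for a 1280x720 screen share, one extra layer with scaleDownBy = 2, fps = 3:
//   capture : 1280 / 2 x 720 / 2 = 640 x 360 at 3 fps
//   divisor : scaleDownBy^2 * (originalFps / fps) = 4 * (30 / 3) = 40
//   bitrate : originalEncoding.maxBitrate / 40
// matching the ScreenSharePresets.ORIGINAL.encoding.maxBitrate / 40 expectation in the test below.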
@@ -28,10 +28,12 @@ import io.livekit.android.room.DefaultsManager
 import io.livekit.android.room.RTCEngine
 import io.livekit.android.room.track.LocalVideoTrack
 import io.livekit.android.room.track.LocalVideoTrackOptions
+import io.livekit.android.room.track.ScreenSharePresets
 import io.livekit.android.room.track.Track
 import io.livekit.android.room.track.TrackException
 import io.livekit.android.room.track.VideoCaptureParameter
 import io.livekit.android.room.track.VideoCodec
+import io.livekit.android.room.track.VideoPreset169
 import io.livekit.android.test.MockE2ETest
 import io.livekit.android.test.assert.assertIsClassList
 import io.livekit.android.test.coroutines.toListUntilSignal

@@ -288,15 +290,15 @@ class LocalParticipantMockE2ETest : MockE2ETest() {
         )
     }

-    private fun createLocalTrack() = LocalVideoTrack(
+    private fun createLocalTrack(width: Int = 1280, height: Int = 720, isScreencast: Boolean = false) = LocalVideoTrack(
         capturer = MockVideoCapturer(),
         source = mock(VideoSource::class.java),
         name = "",
         options = LocalVideoTrackOptions(
-            isScreencast = false,
+            isScreencast = isScreencast,
             deviceId = null,
             position = null,
-            captureParams = VideoCaptureParameter(width = 0, height = 0, maxFps = 0),
+            captureParams = VideoCaptureParameter(width = width, height = height, maxFps = 30),
         ),
         rtcTrack = MockVideoStreamTrack(),
         peerConnectionFactory = component.peerConnectionFactory(),

@@ -307,6 +309,219 @@ class LocalParticipantMockE2ETest : MockE2ETest() {
     )

     @Test
+    fun publishSimulcastDefaultLayers() = runTest {
+        connect()
+
+        val wsFactory = component.websocketFactory()
+        wsFactory.ws.clearRequests()
+        room.localParticipant.publishVideoTrack(track = createLocalTrack(width = 1280, height = 720))
+
+        testScheduler.advanceUntilIdle()
+
+        val sentRequests = wsFactory.ws.sentRequests
+        assertEquals(1, sentRequests.size)
+
+        assertTrue(
+            sentRequests.any { requestString ->
+                val sentRequest = LivekitRtc.SignalRequest.newBuilder()
+                    .mergeFrom(requestString.toPBByteString())
+                    .build()
+
+                if (sentRequest.hasAddTrack()) {
+                    val addTrackRequest = sentRequest.addTrack
+                    println(addTrackRequest)
+                    if (addTrackRequest.type == LivekitModels.TrackType.VIDEO) {
+                        val layerList = addTrackRequest.layersList
+                        var correctLayers = layerList.size == 3
+                        correctLayers = correctLayers && layerList.any { layer ->
+                            // original
+                            layer.quality == LivekitModels.VideoQuality.HIGH &&
+                                layer.bitrate == VideoPreset169.H720.encoding.maxBitrate &&
+                                layer.height == 720 &&
+                                layer.width == 1280
+                        }
+                        correctLayers = correctLayers && layerList.any { layer ->
+                            // default H360
+                            layer.quality == LivekitModels.VideoQuality.MEDIUM &&
+                                layer.bitrate == VideoPreset169.H360.encoding.maxBitrate &&
+                                layer.height == VideoPreset169.H360.capture.height &&
+                                layer.width == VideoPreset169.H360.capture.width
+                        }
+                        correctLayers = correctLayers && layerList.any { layer ->
+                            // default H180
+                            layer.quality == LivekitModels.VideoQuality.LOW &&
+                                layer.bitrate == VideoPreset169.H180.encoding.maxBitrate &&
+                                layer.height == VideoPreset169.H180.capture.height &&
+                                layer.width == VideoPreset169.H180.capture.width
+                        }
+                        return@any correctLayers
+                    }
+                }
+                return@any false
+            },
+        )
+    }
+
+    @Test
+    fun publishSimulcastCustomLayers() = runTest {
+        room.videoTrackPublishDefaults = room.videoTrackPublishDefaults.copy(
+            simulcastLayers = listOf(VideoPreset169.H540, VideoPreset169.H90),
+        )
+        connect()
+
+        val wsFactory = component.websocketFactory()
+        wsFactory.ws.clearRequests()
+        room.localParticipant.publishVideoTrack(track = createLocalTrack(width = 1920, height = 1080))
+
+        testScheduler.advanceUntilIdle()
+
+        val sentRequests = wsFactory.ws.sentRequests
+        assertEquals(1, sentRequests.size)
+
+        assertTrue(
+            sentRequests.any { requestString ->
+                val sentRequest = LivekitRtc.SignalRequest.newBuilder()
+                    .mergeFrom(requestString.toPBByteString())
+                    .build()
+
+                if (sentRequest.hasAddTrack()) {
+                    val addTrackRequest = sentRequest.addTrack
+                    println(addTrackRequest)
+                    if (addTrackRequest.type == LivekitModels.TrackType.VIDEO) {
+                        val layerList = addTrackRequest.layersList
+                        var correctLayers = layerList.size == 3
+                        correctLayers = correctLayers && layerList.any { layer ->
+                            layer.quality == LivekitModels.VideoQuality.HIGH &&
+                                layer.bitrate == VideoPreset169.H1080.encoding.maxBitrate &&
+                                layer.height == VideoPreset169.H1080.capture.height &&
+                                layer.width == VideoPreset169.H1080.capture.width
+                        }
+                        correctLayers = correctLayers && layerList.any { layer ->
+                            layer.quality == LivekitModels.VideoQuality.MEDIUM &&
+                                layer.bitrate == VideoPreset169.H540.encoding.maxBitrate &&
+                                layer.height == VideoPreset169.H540.capture.height &&
+                                layer.width == VideoPreset169.H540.capture.width
+                        }
+                        correctLayers = correctLayers && layerList.any { layer ->
+                            layer.quality == LivekitModels.VideoQuality.LOW &&
+                                layer.bitrate == VideoPreset169.H90.encoding.maxBitrate &&
+                                layer.height == VideoPreset169.H90.capture.height &&
+                                layer.width == VideoPreset169.H90.capture.width
+                        }
+                        return@any correctLayers
+                    }
+                }
+                return@any false
+            },
+        )
+    }
+
+    @Test
+    fun publishSimulcastLargerLayersIgnored() = runTest {
+        room.videoTrackPublishDefaults = room.videoTrackPublishDefaults.copy(
+            simulcastLayers = listOf(VideoPreset169.H1080, VideoPreset169.H90),
+        )
+        connect()
+
+        val wsFactory = component.websocketFactory()
+        wsFactory.ws.clearRequests()
+        room.localParticipant.publishVideoTrack(track = createLocalTrack(width = VideoPreset169.H540.capture.width, height = VideoPreset169.H540.capture.height))
+
+        testScheduler.advanceUntilIdle()
+
+        val sentRequests = wsFactory.ws.sentRequests
+        assertEquals(1, sentRequests.size)
+
+        assertTrue(
+            sentRequests.any { requestString ->
+                val sentRequest = LivekitRtc.SignalRequest.newBuilder()
+                    .mergeFrom(requestString.toPBByteString())
+                    .build()
+
+                if (sentRequest.hasAddTrack()) {
+                    val addTrackRequest = sentRequest.addTrack
+                    println(addTrackRequest)
+                    if (addTrackRequest.type == LivekitModels.TrackType.VIDEO) {
+                        val layerList = addTrackRequest.layersList
+                        assertEquals(2, layerList.size)
+
+                        assertTrue(layerList.none { layer -> layer.quality == LivekitModels.VideoQuality.HIGH })
+                        assertTrue(
+                            layerList.any { layer ->
+                                layer.quality == LivekitModels.VideoQuality.MEDIUM &&
+                                    layer.bitrate == VideoPreset169.H540.encoding.maxBitrate &&
+                                    layer.height == VideoPreset169.H540.capture.height &&
+                                    layer.width == VideoPreset169.H540.capture.width
+                            },
+                        )
+                        assertTrue(
+                            layerList.any { layer ->
+                                layer.quality == LivekitModels.VideoQuality.LOW &&
+                                    layer.bitrate == VideoPreset169.H90.encoding.maxBitrate &&
+                                    layer.height == VideoPreset169.H90.capture.height &&
+                                    layer.width == VideoPreset169.H90.capture.width
+                            },
+                        )
+                        return@any true
+                    }
+                }
+                return@any false
+            },
+        )
+    }
+
+    @Test
+    fun publishScreencastDefaultLayers() = runTest {
+        connect()
+
+        val wsFactory = component.websocketFactory()
+        wsFactory.ws.clearRequests()
+        room.localParticipant.publishVideoTrack(track = createLocalTrack(width = 1280, height = 720, isScreencast = true))
+
+        testScheduler.advanceUntilIdle()
+
+        val sentRequests = wsFactory.ws.sentRequests
+        assertEquals(1, sentRequests.size)
+
+        assertTrue(
+            sentRequests.any { requestString ->
+                val sentRequest = LivekitRtc.SignalRequest.newBuilder()
+                    .mergeFrom(requestString.toPBByteString())
+                    .build()
+
+                if (sentRequest.hasAddTrack()) {
+                    val addTrackRequest = sentRequest.addTrack
+                    println(addTrackRequest)
+                    if (addTrackRequest.type == LivekitModels.TrackType.VIDEO) {
+                        val layerList = addTrackRequest.layersList
+                        assertEquals(2, layerList.size)
+                        assertTrue(
+                            layerList.any { layer ->
+                                // original
+                                layer.quality == LivekitModels.VideoQuality.MEDIUM &&
+                                    layer.bitrate == ScreenSharePresets.ORIGINAL.encoding.maxBitrate &&
+                                    layer.height == 720 &&
+                                    layer.width == 1280
+                            },
+                        )
+                        assertTrue(
+                            layerList.any { layer ->
+                                // default simulcast layer
+                                layer.quality == LivekitModels.VideoQuality.LOW &&
+                                    layer.bitrate == ScreenSharePresets.ORIGINAL.encoding.maxBitrate / 40 &&
+                                    layer.height == 720 / 2 &&
+                                    layer.width == 1280 / 2
+                            },
+                        )
+                        return@any true
+                    }
+                }
+                return@any false
+            },
+        )
+    }
+
+    @Test
     fun publishSetCodecPreferencesH264() = runTest {
         room.videoTrackPublishDefaults = room.videoTrackPublishDefaults.copy(videoCodec = "h264")
         connect()
-