DummyVideoCapturer.kt
3.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
package io.livekit.android.videoencodedecode
import android.content.Context
import android.os.SystemClock
import androidx.annotation.ColorInt
import org.webrtc.*
import java.nio.ByteBuffer
import java.util.*
import java.util.concurrent.TimeUnit
class DummyVideoCapturer(@ColorInt var color: Int) : VideoCapturer {
var capturerObserver: CapturerObserver? = null
val timer = Timer()
var frameWidth = 0
var frameHeight = 0
private val tickTask: TimerTask = object : TimerTask() {
override fun run() {
tick()
}
}
fun tick() {
val videoFrame: VideoFrame = createFrame()
this.capturerObserver?.onFrameCaptured(videoFrame)
videoFrame.release()
}
override fun initialize(
surfaceTextureHelper: SurfaceTextureHelper,
applicationContext: Context,
capturerObserver: CapturerObserver
) {
this.capturerObserver = capturerObserver
}
override fun startCapture(width: Int, height: Int, framerate: Int) {
frameWidth = width
frameHeight = height
this.timer.schedule(this.tickTask, 0L, (1000 / framerate).toLong())
}
@Throws(InterruptedException::class)
override fun stopCapture() {
this.timer.cancel()
}
override fun changeCaptureFormat(width: Int, height: Int, framerate: Int) {}
override fun dispose() {
this.timer.cancel()
}
private fun createFrame(): VideoFrame {
val captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())
val buffer = JavaI420Buffer.allocate(this.frameWidth, this.frameHeight)
encodeYUV420SP(buffer, this.color, frameWidth, frameHeight)
this.color = (color + 1) % 0xFFFFFF
return VideoFrame(buffer, 0, captureTimeNs)
}
override fun isScreencast(): Boolean {
return false
}
companion object {
// adapted from https://stackoverflow.com/a/13055615/295675
fun encodeYUV420SP(yuvBuffer: JavaI420Buffer, color: Int, width: Int, height: Int) {
var yIndex = 0
var uvIndex = 0
var R: Int
var G: Int
var B: Int
var Y: Int
var U: Int
var V: Int
val dataY: ByteBuffer = yuvBuffer.dataY
val dataU = yuvBuffer.dataU
val dataV = yuvBuffer.dataV
var index = 0
for (j in 0 until height) {
for (i in 0 until width) {
R = color and 0xff0000 shr 16
G = color and 0xff00 shr 8
B = color and 0xff shr 0
// well known RGB to YUV algorithm
Y = (66 * R + 129 * G + 25 * B + 128 shr 8) + 16
U = (-38 * R - 74 * G + 112 * B + 128 shr 8) + 128
V = (112 * R - 94 * G - 18 * B + 128 shr 8) + 128
// NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
// meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
// pixel AND every other scanline.
dataY[yIndex++] = (if (Y < 0) 0 else if (Y > 255) 255 else Y).toByte()
if (j % 2 == 0 && index % 2 == 0) {
dataV[uvIndex] = (if (V < 0) 0 else if (V > 255) 255 else V).toByte()
dataU[uvIndex] = (if (U < 0) 0 else if (U > 255) 255 else U).toByte()
uvIndex++
}
index++
}
}
}
}
}
private operator fun ByteBuffer.set(index: Int, value: Byte) {
put(index, value)
}