davidliu
Committed by GitHub

Selfie ML video processing examples (#378)

* Selfie ML video processing examples

* spotless

* cleanup
Showing 35 changed files with 912 additions and 4 deletions
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectMigrations">
<option name="MigrateToGradleLocalJavaHome">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
</component>
</project>
\ No newline at end of file
... ...
... ... @@ -23,6 +23,11 @@ buildscript {
apply plugin: 'io.codearte.nexus-staging'
subprojects {
// Ignore examples folder, it's not a module itself.
if (project.name == "examples") {
return
}
repositories {
google()
mavenCentral()
... ...
ext {
android_build_tools_version = '8.2.2'
compose_version = '1.2.1'
compose_compiler_version = '1.4.5'
kotlin_version = '1.8.20'
java_version = JavaVersion.VERSION_1_8
dokka_version = '1.5.0'
androidSdk = [
compileVersion: 34,
targetVersion : 34,
minVersion : 21,
]
versions = [
... ...
/build
\ No newline at end of file
... ...
// Build script for the selfie-segmentation example app.
plugins {
id 'com.android.application'
id 'org.jetbrains.kotlin.android'
}
android {
namespace 'io.livekit.android.selfie'
// SDK and Java versions come from the root project's ext block (androidSdk, java_version).
compileSdk androidSdk.compileVersion
defaultConfig {
applicationId "io.livekit.android.selfie"
minSdk androidSdk.minVersion
targetSdk androidSdk.targetVersion
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility java_version
targetCompatibility java_version
}
kotlinOptions {
jvmTarget = java_version
}
}
dependencies {
// ML Kit selfie segmentation model used by the example video processors.
implementation 'com.google.mlkit:segmentation-selfie:16.0.0-beta4'
// Depends on the SDK module in this repository rather than a published artifact.
api project(":livekit-android-sdk")
api "androidx.core:core-ktx:${versions.androidx_core}"
implementation 'androidx.appcompat:appcompat:1.6.1'
implementation 'com.google.android.material:material:1.11.0'
api deps.coroutines.lib
api "androidx.lifecycle:lifecycle-runtime-ktx:${versions.androidx_lifecycle}"
api "androidx.lifecycle:lifecycle-viewmodel-ktx:${versions.androidx_lifecycle}"
api "androidx.lifecycle:lifecycle-common-java8:${versions.androidx_lifecycle}"
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.5'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
}
... ...
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.selfie
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.platform.app.InstrumentationRegistry
import org.junit.Assert.*
import org.junit.Test
import org.junit.runner.RunWith
/**
 * Instrumented test that runs on an Android device or emulator.
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {
    @Test
    fun useAppContext() {
        // Grab the context of the application under test and verify its package name.
        val targetContext = InstrumentationRegistry.getInstrumentation().targetContext
        assertEquals("io.livekit.android.selfie", targetContext.packageName)
    }
}
... ...
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.Livekitandroid">
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.selfie
import android.Manifest
import android.content.pm.PackageManager
import android.os.Bundle
import android.widget.Toast
import androidx.activity.ComponentActivity
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.core.content.ContextCompat
import androidx.lifecycle.ViewModelProvider
import io.livekit.android.renderer.TextureViewRenderer
/**
 * Displays the local camera feed after it has been run through the selfie
 * segmentation video processor owned by [MainViewModel].
 */
class MainActivity : AppCompatActivity() {

    lateinit var viewModel: MainViewModel

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        viewModel = ViewModelProvider(this)[MainViewModel::class.java]

        // Wire the renderer to the room's EGL context, then attach whatever
        // track the view model publishes.
        val rendererView = findViewById<TextureViewRenderer>(R.id.renderer)
        viewModel.room.initVideoRenderer(rendererView)
        viewModel.track.observe(this) { videoTrack ->
            videoTrack?.addRenderer(rendererView)
        }

        // Capture can only start once camera/mic permissions are granted.
        requestNeededPermissions { viewModel.startCapture() }
    }
}
/**
 * Requests RECORD_AUDIO and CAMERA permissions if any are missing.
 *
 * @param onPermissionsGranted invoked once all permissions are granted — either
 * immediately (when nothing was missing) or after the user accepts the prompts.
 */
fun ComponentActivity.requestNeededPermissions(onPermissionsGranted: (() -> Unit)? = null) {
    val launcher = registerForActivityResult(
        ActivityResultContracts.RequestMultiplePermissions(),
    ) { results ->
        // Surface a toast for every permission the user declined.
        results.filterValues { granted -> !granted }
            .keys
            .forEach { permission ->
                Toast.makeText(
                    this,
                    "Missing permission: $permission",
                    Toast.LENGTH_SHORT,
                ).show()
            }

        // Notify the caller only when the full set was granted.
        if (results.values.all { it } && onPermissionsGranted != null) {
            onPermissionsGranted()
        }
    }

    val missing = listOf(Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA)
        .filter { ContextCompat.checkSelfPermission(this, it) == PackageManager.PERMISSION_DENIED }

    if (missing.isEmpty()) {
        onPermissionsGranted?.invoke()
    } else {
        launcher.launch(missing.toTypedArray())
    }
}
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.selfie
import android.app.Application
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.MutableLiveData
import io.livekit.android.LiveKit
import io.livekit.android.LiveKitOverrides
import io.livekit.android.room.track.CameraPosition
import io.livekit.android.room.track.LocalVideoTrack
import io.livekit.android.room.track.LocalVideoTrackOptions
import kotlinx.coroutines.Dispatchers
import livekit.org.webrtc.EglBase
/**
 * Owns the LiveKit room and the selfie-segmentation camera track.
 *
 * A shared [EglBase] is passed to both the room and the bitmap processor so
 * the processor's SurfaceTexture pipeline uses the same GL context.
 */
class MainViewModel(application: Application) : AndroidViewModel(application) {
    val eglBase = EglBase.create()

    val room = LiveKit.create(
        application,
        overrides = LiveKitOverrides(eglBase = eglBase),
    )

    val track = MutableLiveData<LocalVideoTrack?>(null)

    // Swap in SelfieVideoProcessor(Dispatchers.IO) for direct I420 processing instead.
    val processor = SelfieBitmapVideoProcessor(eglBase, Dispatchers.IO)

    /** Creates a front-camera track routed through the segmentation processor and starts it. */
    fun startCapture() {
        val videoTrack = room.localParticipant.createVideoTrack(
            options = LocalVideoTrackOptions(position = CameraPosition.FRONT),
            videoProcessor = processor,
        )
        videoTrack.startCapture()
        track.postValue(videoTrack)
    }

    override fun onCleared() {
        super.onCleared()
        // Stop capture and tear down room resources before disposing the processor.
        track.value?.stopCapture()
        room.release()
        processor.dispose()
    }
}
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.selfie
import android.graphics.BitmapFactory
import android.graphics.Color
import android.graphics.ImageFormat
import android.graphics.Matrix
import android.graphics.Paint
import android.graphics.Rect
import android.graphics.YuvImage
import android.os.Build
import android.util.Log
import android.view.Surface
import androidx.core.graphics.set
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.Segmentation
import com.google.mlkit.vision.segmentation.Segmenter
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.cancel
import kotlinx.coroutines.channels.BufferOverflow
import kotlinx.coroutines.flow.MutableSharedFlow
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import livekit.org.webrtc.EglBase
import livekit.org.webrtc.SurfaceTextureHelper
import livekit.org.webrtc.VideoFrame
import livekit.org.webrtc.VideoProcessor
import livekit.org.webrtc.VideoSink
import livekit.org.webrtc.YuvHelper
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
/**
 * A [VideoProcessor] that runs ML Kit selfie segmentation on captured frames and paints
 * the detected background green, working through an intermediate mutable Bitmap.
 *
 * Pipeline: captured frame -> I420 -> NV21 bytes -> JPEG -> mutable Bitmap -> segmenter ->
 * bitmap drawn onto [surface], which the [surfaceTextureHelper] turns back into a
 * texture-backed VideoFrame delivered to the sink in [onCapturerStarted]'s listener.
 *
 * Call [dispose] when finished to release the segmenter, GL resources, and the
 * processing coroutine.
 */
class SelfieBitmapVideoProcessor(eglBase: EglBase, dispatcher: CoroutineDispatcher) : VideoProcessor {
    private var targetSink: VideoSink? = null
    private val segmenter: Segmenter

    // Cached so rotation/size changes are only pushed to the texture helper when they change.
    private var lastRotation = 0
    private var lastWidth = 0
    private var lastHeight = 0

    // Converts the processed bitmap back into a VideoFrame.
    private val surfaceTextureHelper = SurfaceTextureHelper.create("BitmapToYUV", eglBase.eglBaseContext)
    private val surface = Surface(surfaceTextureHelper.surfaceTexture)

    private val scope = CoroutineScope(dispatcher)

    // Capacity 1 with no replay: frames arriving while one is in flight are
    // simply rejected by tryEmit in onFrameCaptured (i.e. dropped).
    private val taskFlow = MutableSharedFlow<VideoFrame>(
        replay = 0,
        extraBufferCapacity = 1,
        onBufferOverflow = BufferOverflow.SUSPEND,
    )

    init {
        val options =
            SelfieSegmenterOptions.Builder()
                .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE)
                .build()
        segmenter = Segmentation.getClient(options)

        // Funnel processing into a single flow that won't buffer,
        // since processing will be slower than video capture
        scope.launch {
            taskFlow.collect { frame ->
                processFrame(frame)
            }
        }
    }

    override fun onCapturerStarted(started: Boolean) {
        if (started) {
            // Frames produced by drawing onto [surface] arrive here; forward them on.
            surfaceTextureHelper.startListening { frame ->
                targetSink?.onFrame(frame)
            }
        }
    }

    override fun onCapturerStopped() {
        surfaceTextureHelper.stopListening()
    }

    override fun onFrameCaptured(frame: VideoFrame) {
        // Retain only when accepted; otherwise the frame is dropped because
        // the previous frame is still being processed.
        if (taskFlow.tryEmit(frame)) {
            frame.retain()
        }
    }

    /**
     * Segments [frame] and redraws it with the background colored green.
     *
     * Suspends until the frame is fully handled, providing natural backpressure
     * for the collector launched in `init`.
     */
    suspend fun processFrame(frame: VideoFrame) {
        // toI420 causes a retain, so a corresponding frameBuffer.release is needed when done.
        val frameBuffer = frame.buffer.toI420() ?: return
        val rotationDegrees = frame.rotation

        val dataY = frameBuffer.dataY
        val dataU = frameBuffer.dataU
        val dataV = frameBuffer.dataV
        val nv12Buffer = ByteBuffer.allocateDirect(dataY.limit() + dataU.limit() + dataV.limit())

        // For some reason, I420ToNV12 actually expects YV12
        YuvHelper.I420ToNV12(
            frameBuffer.dataY,
            frameBuffer.strideY,
            frameBuffer.dataV,
            frameBuffer.strideV,
            frameBuffer.dataU,
            frameBuffer.strideU,
            nv12Buffer,
            frameBuffer.width,
            frameBuffer.height,
        )

        // Use YuvImage to convert to bitmap.
        // NOTE(review): JPEG round-trip is lossy; acceptable for an example — confirm for production use.
        val yuvImage = YuvImage(nv12Buffer.array(), ImageFormat.NV21, frameBuffer.width, frameBuffer.height, null)
        val stream = ByteArrayOutputStream()
        yuvImage.compressToJpeg(Rect(0, 0, frameBuffer.width, frameBuffer.height), 100, stream)
        val bitmap = BitmapFactory.decodeByteArray(
            stream.toByteArray(),
            0,
            stream.size(),
            BitmapFactory.Options().apply { inMutable = true },
        )

        // No longer need the original frame buffer any more.
        frameBuffer.release()
        frame.release()

        val inputImage = InputImage.fromBitmap(bitmap, 0)
        val task = segmenter.process(inputImage)

        // Locked until the frame has been drawn (or failed); we suspend on it below.
        val latch = Mutex(true)
        task.addOnSuccessListener { segmentationMask ->
            val mask = segmentationMask.buffer
            // Color any pixel with high background confidence green.
            for (y in 0 until segmentationMask.height) {
                for (x in 0 until segmentationMask.width) {
                    val backgroundConfidence = 1 - mask.float
                    if (backgroundConfidence > 0.8f) {
                        bitmap[x, y] = Color.GREEN // Color off the background
                    }
                }
            }

            if (lastRotation != rotationDegrees) {
                surfaceTextureHelper.setFrameRotation(rotationDegrees)
                lastRotation = rotationDegrees
            }

            if (lastWidth != bitmap.width || lastHeight != bitmap.height) {
                surfaceTextureHelper.setTextureSize(bitmap.width, bitmap.height)
                lastWidth = bitmap.width
                lastHeight = bitmap.height
            }

            surfaceTextureHelper.handler?.post {
                val canvas = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                    surface.lockHardwareCanvas()
                } else {
                    surface.lockCanvas(null)
                }
                if (canvas != null) {
                    canvas.drawBitmap(bitmap, Matrix(), Paint())
                    surface.unlockCanvasAndPost(canvas)
                }
                bitmap.recycle()
                latch.unlock()
            }
        }.addOnFailureListener {
            Log.e("SelfieVideoProcessor", "failed to process frame!")
            // Fix: recycle the bitmap and unlock the latch on failure. Previously
            // the latch stayed locked, so the collecting coroutine suspended in
            // latch.lock() forever and frame processing deadlocked after the
            // first segmentation error.
            bitmap.recycle()
            latch.unlock()
        }

        // Wait until the listeners above have finished with this frame.
        latch.lock()
    }

    override fun setSink(sink: VideoSink?) {
        targetSink = sink
    }

    /** Releases the segmenter and GL/texture resources and stops the processing coroutine. */
    fun dispose() {
        segmenter.close()
        surfaceTextureHelper.stopListening()
        surfaceTextureHelper.dispose()
        scope.cancel()
    }
}
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.selfie
import android.util.Log
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.Segmentation
import com.google.mlkit.vision.segmentation.Segmenter
import com.google.mlkit.vision.segmentation.selfie.SelfieSegmenterOptions
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.cancel
import kotlinx.coroutines.channels.BufferOverflow
import kotlinx.coroutines.flow.MutableSharedFlow
import kotlinx.coroutines.launch
import livekit.org.webrtc.VideoFrame
import livekit.org.webrtc.VideoProcessor
import livekit.org.webrtc.VideoSink
import java.nio.ByteBuffer
/**
 * A [VideoProcessor] that runs ML Kit selfie segmentation directly on the I420 planes,
 * flattening the luma of pixels classified as background before forwarding the frame
 * to the sink (no bitmap round-trip).
 *
 * Call [dispose] when finished to release the segmenter and the processing coroutine.
 */
class SelfieVideoProcessor(dispatcher: CoroutineDispatcher) : VideoProcessor {
    private var targetSink: VideoSink? = null
    private val segmenter: Segmenter

    private val scope = CoroutineScope(dispatcher)

    // Capacity 1 with no replay: frames arriving while one is in flight are
    // rejected by tryEmit in onFrameCaptured (i.e. dropped).
    private val taskFlow = MutableSharedFlow<VideoFrame>(
        replay = 0,
        extraBufferCapacity = 1,
        onBufferOverflow = BufferOverflow.SUSPEND,
    )

    init {
        val options =
            SelfieSegmenterOptions.Builder()
                .setDetectorMode(SelfieSegmenterOptions.STREAM_MODE)
                .build()
        segmenter = Segmentation.getClient(options)

        // Funnel processing into a single flow that won't buffer,
        // since processing will be slower than video capture
        scope.launch {
            taskFlow.collect { frame ->
                processFrame(frame)
            }
        }
    }

    override fun onCapturerStarted(started: Boolean) {
    }

    override fun onCapturerStopped() {
    }

    override fun onFrameCaptured(frame: VideoFrame) {
        // Retain only when accepted; otherwise the frame is dropped because
        // the previous frame is still being processed.
        if (taskFlow.tryEmit(frame)) {
            frame.retain()
        }
    }

    /**
     * Segments [frame] asynchronously; on success, grays out background luma in-place
     * on the I420 buffer and forwards the modified frame to the sink.
     */
    fun processFrame(frame: VideoFrame) {
        // toI420 causes a retain, so a corresponding frameBuffer.release is needed when done.
        val frameBuffer = frame.buffer.toI420() ?: return
        val byteBuffer = ByteBuffer.allocateDirect(frameBuffer.dataY.limit() + frameBuffer.dataV.limit() + frameBuffer.dataU.limit())
            // YV12 is exactly like I420, but the order of the U and V planes is reversed.
            // In the name, "YV" refers to the plane order: Y, then V (then U).
            .put(frameBuffer.dataY)
            .put(frameBuffer.dataV)
            .put(frameBuffer.dataU)

        val image = InputImage.fromByteBuffer(
            byteBuffer,
            frameBuffer.width,
            frameBuffer.height,
            0,
            InputImage.IMAGE_FORMAT_YV12,
        )

        val task = segmenter.process(image)
        task.addOnSuccessListener { segmentationMask ->
            val mask = segmentationMask.buffer
            val dataY = frameBuffer.dataY
            // Walk the mask and the Y plane in lockstep; write mid-gray (0x80)
            // into the luma of any pixel with high background confidence.
            // NOTE(review): this walks dataY linearly, which assumes strideY == width — confirm.
            for (i in 0 until segmentationMask.height) {
                for (j in 0 until segmentationMask.width) {
                    val backgroundConfidence = 1 - mask.float
                    if (backgroundConfidence > 0.8f) {
                        val position = dataY.position()
                        val yValue = 0x80.toByte()
                        dataY.position(position)
                        dataY.put(yValue)
                    } else {
                        dataY.position(dataY.position() + 1)
                    }
                }
            }

            // Send the final frame off to the sink.
            targetSink?.onFrame(VideoFrame(frameBuffer, frame.rotation, frame.timestampNs))

            // Release any remaining resources
            frameBuffer.release()
            frame.release()
        }.addOnFailureListener {
            Log.e("SelfieVideoProcessor", "failed to process frame!")
            // Fix: release the retained buffers on failure too; previously a
            // failed segmentation leaked both frameBuffer and frame.
            frameBuffer.release()
            frame.release()
        }
    }

    override fun setSink(sink: VideoSink?) {
        targetSink = sink
    }

    /** Releases the segmenter and stops the processing coroutine (consistent with SelfieBitmapVideoProcessor). */
    fun dispose() {
        segmenter.close()
        scope.cancel()
    }
}
... ...
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
... ...
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
... ...
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#F00"
tools:context=".MainActivity">
<io.livekit.android.renderer.TextureViewRenderer
android:id="@+id/renderer"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
... ...
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
... ...
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
... ...
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Base.Theme.Livekitandroid" parent="Theme.Material3.DayNight.NoActionBar">
<!-- Customize your dark theme here. -->
<!-- <item name="colorPrimary">@color/my_dark_primary</item> -->
</style>
</resources>
... ...
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>
... ...
<resources>
<string name="app_name">selfie-segmentation</string>
</resources>
\ No newline at end of file
... ...
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Base.Theme.Livekitandroid" parent="Theme.Material3.DayNight.NoActionBar">
<!-- Customize your light theme here. -->
<!-- <item name="colorPrimary">@color/my_light_primary</item> -->
</style>
<style name="Theme.Livekitandroid" parent="Base.Theme.Livekitandroid" />
</resources>
... ...
/*
* Copyright 2024 LiveKit, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.livekit.android.selfie
import org.junit.Assert.*
import org.junit.Test
/**
 * Example local unit test that executes on the development machine (host).
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
class ExampleUnitTest {
    @Test
    fun addition_isCorrect() {
        val expected = 4
        assertEquals(expected, 2 + 2)
    }
}
... ...
#Mon May 01 22:58:53 JST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
... ...
... ... @@ -10,3 +10,4 @@ include ':livekit-lint'
include ':video-encode-decode-test'
include ':sample-app-basic'
include ':sample-app-record-local'
include ':examples:selfie-segmentation'
... ...