davidliu
Committed by GitHub

Audio/Video recording example (#154)

Showing 34 changed files with 942 additions and 13 deletions
... ... @@ -8,7 +8,7 @@ import androidx.activity.ComponentActivity
import androidx.activity.result.contract.ActivityResultContracts
import androidx.core.content.ContextCompat
fun ComponentActivity.requestNeededPermissions() {
fun ComponentActivity.requestNeededPermissions(onPermissionsGranted: (() -> Unit)? = null) {
val requestPermissionLauncher =
registerForActivityResult(
ActivityResultContracts.RequestMultiplePermissions()
... ... @@ -24,6 +24,11 @@ fun ComponentActivity.requestNeededPermissions() {
.show()
}
}
// If all granted, notify if needed.
if (onPermissionsGranted != null && grants.all { it.value }) {
onPermissionsGranted()
}
}
val neededPermissions = listOf(Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA)
... ... @@ -39,5 +44,7 @@ fun ComponentActivity.requestNeededPermissions() {
if (neededPermissions.isNotEmpty()) {
requestPermissionLauncher.launch(neededPermissions)
} else {
onPermissionsGranted?.invoke()
}
}
\ No newline at end of file
... ...
/build
\ No newline at end of file
... ...
# sample-app-record-local
An example showing how to save the local device's audio and video tracks.
While connected to a Room, this app records audio and video from your microphone and camera. Audio samples
and video frames are passed into a `VideoFileRenderer` object, where they are encoded using
`android.media.MediaCodec` and written with an `android.media.MediaMuxer` into a video file.
Videos are saved to the app's external files directory (
normally `/sdcard/Android/data/io.livekit.android.sample.record/files/Movies`).
\ No newline at end of file
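For orientation, here is a condensed sketch of that wiring, lifted from the MainActivity added later in this commit. It assumes it runs inside a `ComponentActivity` that holds a `videoFileRenderer` property, and the connection `url`/`token` values are placeholders:

```kotlin
// Route raw microphone samples into the recorder via the audio device module.
val room = LiveKit.create(
    appContext = applicationContext,
    overrides = LiveKitOverrides(
        javaAudioDeviceModuleCustomizer = { builder ->
            builder.setSamplesReadyCallback { samples ->
                videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples)
            }
        }
    )
)

lifecycleScope.launch {
    room.connect(url, token)
    room.localParticipant.setMicrophoneEnabled(true)
    room.localParticipant.setCameraEnabled(true)

    // Output file under the app's external Movies directory, e.g. <timestamp>.mp4.
    val file = File(getExternalFilesDir(Environment.DIRECTORY_MOVIES), "${Date().time}.mp4")
    videoFileRenderer = VideoFileRenderer(file.absolutePath, EglBase.create().eglBaseContext, true)

    // Camera frames are delivered to the renderer, which drives MediaCodec/MediaMuxer.
    val track = room.localParticipant.getTrackPublication(Track.Source.CAMERA)?.track as LocalVideoTrack
    track.addRenderer(videoFileRenderer!!)
}
```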
... ...
plugins {
id 'com.android.application'
id 'org.jetbrains.kotlin.android'
}
android {
compileSdk 32
defaultConfig {
applicationId "io.livekit.android.sample.record"
minSdk 21
targetSdk 32
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
vectorDrawables {
useSupportLibrary true
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
buildFeatures {
compose true
}
composeOptions {
kotlinCompilerExtensionVersion compose_compiler_version
}
packagingOptions {
resources {
excludes += '/META-INF/{AL2.0,LGPL2.1}'
}
}
}
dependencies {
implementation project(":sample-app-common")
implementation "androidx.compose.ui:ui:$compose_version"
implementation "androidx.compose.material:material:$compose_version"
implementation "androidx.compose.ui:ui-tooling-preview:$compose_version"
implementation "androidx.compose.runtime:runtime-livedata:$compose_version"
implementation 'androidx.lifecycle:lifecycle-runtime-ktx:2.3.1'
implementation 'androidx.activity:activity-compose:1.3.1'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
androidTestImplementation "androidx.compose.ui:ui-test-junit4:$compose_version"
debugImplementation "androidx.compose.ui:ui-tooling:$compose_version"
debugImplementation "androidx.compose.ui:ui-test-manifest:$compose_version"
}
\ No newline at end of file
... ...
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
... ...
package io.livekit.android.sample.record
import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert.*
/**
* Instrumented test, which will execute on an Android device.
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {
@Test
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
assertEquals("io.livekit.android.sample.record", appContext.packageName)
}
}
\ No newline at end of file
... ...
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="io.livekit.android.sample.record">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:networkSecurityConfig="@xml/network_security_config"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.Livekitandroid">
<activity
android:name=".MainActivity"
android:exported="true"
android:label="@string/app_name"
android:theme="@style/Theme.Livekitandroid">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
\ No newline at end of file
... ...
package io.livekit.android.sample.record
import android.os.Bundle
import android.os.Environment
import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.material.Button
import androidx.compose.material.MaterialTheme
import androidx.compose.material.Surface
import androidx.compose.material.Text
import androidx.compose.runtime.getValue
import androidx.compose.runtime.livedata.observeAsState
import androidx.compose.ui.Modifier
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.lifecycleScope
import io.livekit.android.LiveKit
import io.livekit.android.LiveKitOverrides
import io.livekit.android.room.Room
import io.livekit.android.room.track.LocalVideoTrack
import io.livekit.android.room.track.Track
import io.livekit.android.sample.record.ui.theme.LivekitandroidTheme
import io.livekit.android.sample.util.requestNeededPermissions
import kotlinx.coroutines.launch
import org.webrtc.EglBase
import java.io.File
import java.io.IOException
import java.util.*
class MainActivity : ComponentActivity() {
lateinit var room: Room
var videoFileRenderer: VideoFileRenderer? = null
val connected = MutableLiveData(false)
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// Create Room object.
room = LiveKit.create(
appContext = applicationContext,
overrides = LiveKitOverrides(
javaAudioDeviceModuleCustomizer = { builder ->
// Receive audio samples
builder.setSamplesReadyCallback { samples ->
videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples)
}
}
)
)
setContent {
LivekitandroidTheme {
Surface(modifier = Modifier.fillMaxSize(), color = MaterialTheme.colors.background) {
Column {
val isConnected by connected.observeAsState(false)
if (isConnected) {
Text(text = "Connected!")
Button(onClick = { disconnectRoom() }) {
Text("Disconnect")
}
} else {
Text(text = "Not Connected.")
Button(onClick = { connectToRoom() }) {
Text("Connect")
}
}
}
}
}
}
requestNeededPermissions()
}
private fun connectToRoom() {
val url = "wss://www.example.com"
val token = ""
lifecycleScope.launch {
// Connect to server.
room.connect(
url,
token,
)
val localParticipant = room.localParticipant
localParticipant.setMicrophoneEnabled(true)
localParticipant.setCameraEnabled(true)
// Create output file.
val dir = getExternalFilesDir(Environment.DIRECTORY_MOVIES)
val file = File(dir, "${Date().time}.mp4")
if (!file.createNewFile()) {
throw IOException()
}
// Setup video recording
val videoFileRenderer = VideoFileRenderer(
file.absolutePath,
EglBase.create().eglBaseContext,
true
)
this@MainActivity.videoFileRenderer = videoFileRenderer
// Attach to local video track.
val track = localParticipant.getTrackPublication(Track.Source.CAMERA)?.track as LocalVideoTrack
track.addRenderer(videoFileRenderer)
connected.value = true
}
}
fun disconnectRoom() {
room.disconnect()
videoFileRenderer?.release()
videoFileRenderer = null
connected.value = false
}
}
... ...
/**
* MIT License
* <p>
* Copyright (c) 2018 湖北捷智云技术有限公司
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p>
* From https://github.com/flutter-webrtc/flutter-webrtc/blob/main/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java
*/
package io.livekit.android.sample.record;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.EglBase;
import org.webrtc.GlRectDrawer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrameDrawer;
import org.webrtc.VideoSink;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoFileRenderer implements VideoSink, SamplesReadyCallback {
private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread;
private final Handler renderThreadHandler;
private final HandlerThread audioThread;
private final Handler audioThreadHandler;
private int outputFileWidth = -1;
private int outputFileHeight = -1;
private ByteBuffer[] encoderOutputBuffers;
private ByteBuffer[] audioInputBuffers;
private ByteBuffer[] audioOutputBuffers;
private EglBase eglBase;
private EglBase.Context sharedContext;
private VideoFrameDrawer frameDrawer;
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private MediaMuxer mediaMuxer;
private MediaCodec encoder;
private MediaCodec.BufferInfo bufferInfo, audioBufferInfo;
private int trackIndex = -1;
private int audioTrackIndex;
private boolean isRunning = true;
private GlRectDrawer drawer;
private Surface surface;
private MediaCodec audioEncoder;
VideoFileRenderer(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException {
renderThread = new HandlerThread(TAG + "RenderThread");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
if (withAudio) {
audioThread = new HandlerThread(TAG + "AudioThread");
audioThread.start();
audioThreadHandler = new Handler(audioThread.getLooper());
} else {
audioThread = null;
audioThreadHandler = null;
}
bufferInfo = new MediaCodec.BufferInfo();
this.sharedContext = sharedContext;
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
mediaMuxer = new MediaMuxer(outputFile,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
audioTrackIndex = withAudio ? -1 : 0;
}
private void initVideoEncoder() {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
renderThreadHandler.post(() -> {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
surface = encoder.createInputSurface();
eglBase.createSurface(surface);
eglBase.makeCurrent();
drawer = new GlRectDrawer();
});
} catch (Exception e) {
Log.wtf(TAG, e);
}
}
@Override
public void onFrame(VideoFrame frame) {
frame.retain();
if (outputFileWidth == -1) {
outputFileWidth = frame.getRotatedWidth();
outputFileHeight = frame.getRotatedHeight();
initVideoEncoder();
}
renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
}
private void renderFrameOnRenderThread(VideoFrame frame) {
if (frameDrawer == null) {
frameDrawer = new VideoFrameDrawer();
}
frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
frame.release();
drainEncoder();
eglBase.swapBuffers();
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
void release() {
isRunning = false;
if (audioThreadHandler != null)
audioThreadHandler.post(() -> {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
audioThread.quit();
audioThreadHandler.removeCallbacksAndMessages(null);
});
renderThreadHandler.post(() -> {
if (encoder != null) {
encoder.stop();
encoder.release();
}
eglBase.release();
mediaMuxer.stop();
mediaMuxer.release();
renderThread.quit();
renderThreadHandler.removeCallbacksAndMessages(null);
});
}
private boolean encoderStarted = false;
private volatile boolean muxerStarted = false;
private long videoFrameStart = 0;
private void drainEncoder() {
if (!encoderStarted) {
encoder.start();
encoderOutputBuffers = encoder.getOutputBuffers();
encoderStarted = true;
return;
}
while (true) {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.e(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = encoder.getOutputFormat();
Log.e(TAG, "encoder output format changed: " + newFormat);
trackIndex = mediaMuxer.addTrack(newFormat);
if (audioTrackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
videoFrameStart = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs -= videoFrameStart;
if (muxerStarted)
mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
private long presTime = 0L;
private void drainAudio() {
if (audioBufferInfo == null)
audioBufferInfo = new MediaCodec.BufferInfo();
while (true) {
int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
audioOutputBuffers = audioEncoder.getOutputBuffers();
Log.w(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = audioEncoder.getOutputFormat();
Log.w(TAG, "encoder output format changed: " + newFormat);
audioTrackIndex = mediaMuxer.addTrack(newFormat);
if (trackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(audioBufferInfo.offset);
encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
if (muxerStarted)
mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
audioEncoder.releaseOutputBuffer(encoderStatus, false);
if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
if (!isRunning)
return;
audioThreadHandler.post(() -> {
if (audioEncoder == null) try {
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();
audioInputBuffers = audioEncoder.getInputBuffers();
audioOutputBuffers = audioEncoder.getOutputBuffers();
} catch (IOException exception) {
Log.wtf(TAG, exception);
}
int bufferIndex = audioEncoder.dequeueInputBuffer(0);
if (bufferIndex >= 0) {
ByteBuffer buffer = audioInputBuffers[bufferIndex];
buffer.clear();
byte[] data = audioSamples.getData();
buffer.put(data);
audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
presTime += data.length * 125 / 12; // data.length bytes / 2 bytes per sample / 48000 Hz * 1,000,000 us
}
drainAudio();
});
}
}
\ No newline at end of file
... ...
package io.livekit.android.sample.record.ui.theme
import androidx.compose.ui.graphics.Color
val Purple200 = Color(0xFFBB86FC)
val Purple500 = Color(0xFF6200EE)
val Purple700 = Color(0xFF3700B3)
val Teal200 = Color(0xFF03DAC5)
\ No newline at end of file
... ...
package io.livekit.android.sample.record.ui.theme
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.Shapes
import androidx.compose.ui.unit.dp
val Shapes = Shapes(
small = RoundedCornerShape(4.dp),
medium = RoundedCornerShape(4.dp),
large = RoundedCornerShape(0.dp)
)
\ No newline at end of file
... ...
package io.livekit.android.sample.record.ui.theme
import androidx.compose.foundation.isSystemInDarkTheme
import androidx.compose.material.MaterialTheme
import androidx.compose.material.darkColors
import androidx.compose.material.lightColors
import androidx.compose.runtime.Composable
private val DarkColorPalette = darkColors(
primary = Purple200,
primaryVariant = Purple700,
secondary = Teal200
)
private val LightColorPalette = lightColors(
primary = Purple500,
primaryVariant = Purple700,
secondary = Teal200
/* Other default colors to override
background = Color.White,
surface = Color.White,
onPrimary = Color.White,
onSecondary = Color.Black,
onBackground = Color.Black,
onSurface = Color.Black,
*/
)
@Composable
fun LivekitandroidTheme(darkTheme: Boolean = isSystemInDarkTheme(), content: @Composable () -> Unit) {
val colors = if (darkTheme) {
DarkColorPalette
} else {
LightColorPalette
}
MaterialTheme(
colors = colors,
typography = Typography,
shapes = Shapes,
content = content
)
}
\ No newline at end of file
... ...
package io.livekit.android.sample.record.ui.theme
import androidx.compose.material.Typography
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.sp
// Set of Material typography styles to start with
val Typography = Typography(
body1 = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Normal,
fontSize = 16.sp
)
/* Other default text styles to override
button = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.W500,
fontSize = 14.sp
),
caption = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Normal,
fontSize = 12.sp
)
*/
)
\ No newline at end of file
... ...
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
\ No newline at end of file
... ...
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
... ...
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
... ...
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
... ...
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>
\ No newline at end of file
... ...
<resources>
<string name="app_name">sample-app-record-local</string>
</resources>
\ No newline at end of file
... ...
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="Theme.Livekitandroid" parent="android:Theme.Material.Light.NoActionBar">
<item name="android:statusBarColor">@color/purple_700</item>
</style>
</resources>
\ No newline at end of file
... ...
package io.livekit.android.sample.record
import org.junit.Test
import org.junit.Assert.*
/**
* Example local unit test, which will execute on the development machine (host).
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
class ExampleUnitTest {
@Test
fun addition_isCorrect() {
assertEquals(4, 2 + 2)
}
}
\ No newline at end of file
... ...
<?xml version="1.0" encoding="utf-8"?>
<network-security-config>
<domain-config cleartextTrafficPermitted="true">
<domain includeSubdomains="true">example.com</domain>
</domain-config>
<base-config cleartextTrafficPermitted="true">
<trust-anchors>
<certificates src="system" />
</trust-anchors>
</base-config>
</network-security-config>
\ No newline at end of file
... ... @@ -9,3 +9,4 @@ include ':sample-app-common'
include ':livekit-lint'
include ':video-encode-decode-test'
include ':sample-app-basic'
include ':sample-app-record-local'
... ...