Showing 34 changed files with 942 additions and 13 deletions
@@ -8,7 +8,7 @@ import androidx.activity.ComponentActivity
 import androidx.activity.result.contract.ActivityResultContracts
 import androidx.core.content.ContextCompat
 
-fun ComponentActivity.requestNeededPermissions() {
+fun ComponentActivity.requestNeededPermissions(onPermissionsGranted: (() -> Unit)? = null) {
     val requestPermissionLauncher =
         registerForActivityResult(
             ActivityResultContracts.RequestMultiplePermissions()
@@ -24,6 +24,11 @@ fun ComponentActivity.requestNeededPermissions() {
                         .show()
                 }
            }
+
+            // If all granted, notify if needed.
+            if (onPermissionsGranted != null && grants.all { it.value }) {
+                onPermissionsGranted()
+            }
         }
 
     val neededPermissions = listOf(Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA)
@@ -39,5 +44,7 @@ fun ComponentActivity.requestNeededPermissions() {
 
     if (neededPermissions.isNotEmpty()) {
         requestPermissionLauncher.launch(neededPermissions)
+    } else {
+        onPermissionsGranted?.invoke()
     }
 }
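For callers, the new optional `onPermissionsGranted` parameter fires once every requested permission is granted, or immediately when nothing needed to be requested. A minimal usage sketch (the `ExampleActivity` and `connectToRoom()` below are hypothetical illustrations, not part of this change):

    import android.os.Bundle
    import androidx.activity.ComponentActivity
    import io.livekit.android.sample.util.requestNeededPermissions

    class ExampleActivity : ComponentActivity() {
        override fun onCreate(savedInstanceState: Bundle?) {
            super.onCreate(savedInstanceState)
            // New optional callback: runs once all needed permissions are granted,
            // or immediately if nothing had to be requested.
            requestNeededPermissions {
                connectToRoom()
            }
        }

        private fun connectToRoom() {
            // Hypothetical follow-up work that requires RECORD_AUDIO and CAMERA.
        }
    }

The parameter defaults to null, so existing call sites are unaffected; the new sample's MainActivity still calls requestNeededPermissions() with no argument.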
sample-app-record-local/.gitignore
0 → 100644
| 1 | +/build |
sample-app-record-local/README.md
0 → 100644
| 1 | +# sample-app-record-local | ||
| 2 | + | ||
| 3 | +An example showing how to save the local device's audio and video tracks. | ||
| 4 | + | ||
| 5 | +While connected to a Room, this app will save a video from your microphone and camera. Audio samples | ||
| 6 | +and video frames are passed into a `VideoFileRenderer` object, where they are then encoded using | ||
| 7 | +`android.media.MediaCodec` and saved using an `android.media.MediaMuxer` into a video file. | ||
| 8 | + | ||
| 9 | +Videos are saved to the app's external files directory ( | ||
| 10 | +normally `/sdcard/Android/data/io.livekit.android.sample.record/files/Movies`). |
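The wiring described above lives in this sample's MainActivity: a samples-ready callback is supplied through `LiveKitOverrides` so microphone samples reach the renderer, and the renderer is attached to the local camera track. A condensed Kotlin sketch of that flow, simplified from MainActivity.kt shown later in this diff (it runs inside a ComponentActivity; `url` and `token` are placeholders you must supply, and error handling is omitted):

    // Nullable property on the Activity; the audio callback below writes into it once recording starts.
    var videoFileRenderer: VideoFileRenderer? = null

    // In onCreate(): create the Room with a hook that forwards mic samples to the renderer.
    val room = LiveKit.create(
        appContext = applicationContext,
        overrides = LiveKitOverrides(
            javaAudioDeviceModuleCustomizer = { builder ->
                builder.setSamplesReadyCallback { samples ->
                    videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples)
                }
            }
        )
    )

    // When connecting: publish mic + camera, then attach a VideoFileRenderer to the camera track.
    lifecycleScope.launch {
        room.connect(url, token) // url/token are placeholders
        room.localParticipant.setMicrophoneEnabled(true)
        room.localParticipant.setCameraEnabled(true)

        val file = File(getExternalFilesDir(Environment.DIRECTORY_MOVIES), "${Date().time}.mp4")
        file.createNewFile()

        // Encodes video frames (and the audio samples above) with MediaCodec and muxes them into the file.
        val renderer = VideoFileRenderer(file.absolutePath, EglBase.create().eglBaseContext, true)
        videoFileRenderer = renderer

        val cameraTrack = room.localParticipant.getTrackPublication(Track.Source.CAMERA)?.track as? LocalVideoTrack
        cameraTrack?.addRenderer(renderer)
    }

On teardown the sample disconnects the Room first and then calls `videoFileRenderer?.release()`, which stops the encoders and finalizes the MediaMuxer (see `disconnectRoom()` in MainActivity).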
sample-app-record-local/build.gradle
0 → 100644
| 1 | +plugins { | ||
| 2 | + id 'com.android.application' | ||
| 3 | + id 'org.jetbrains.kotlin.android' | ||
| 4 | +} | ||
| 5 | + | ||
| 6 | +android { | ||
| 7 | + compileSdk 32 | ||
| 8 | + | ||
| 9 | + defaultConfig { | ||
| 10 | + applicationId "io.livekit.android.sample.record" | ||
| 11 | + minSdk 21 | ||
| 12 | + targetSdk 32 | ||
| 13 | + versionCode 1 | ||
| 14 | + versionName "1.0" | ||
| 15 | + | ||
| 16 | + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" | ||
| 17 | + vectorDrawables { | ||
| 18 | + useSupportLibrary true | ||
| 19 | + } | ||
| 20 | + } | ||
| 21 | + | ||
| 22 | + buildTypes { | ||
| 23 | + release { | ||
| 24 | + minifyEnabled false | ||
| 25 | + proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' | ||
| 26 | + } | ||
| 27 | + } | ||
| 28 | + compileOptions { | ||
| 29 | + sourceCompatibility JavaVersion.VERSION_1_8 | ||
| 30 | + targetCompatibility JavaVersion.VERSION_1_8 | ||
| 31 | + } | ||
| 32 | + kotlinOptions { | ||
| 33 | + jvmTarget = '1.8' | ||
| 34 | + } | ||
| 35 | + buildFeatures { | ||
| 36 | + compose true | ||
| 37 | + } | ||
| 38 | + composeOptions { | ||
| 39 | + kotlinCompilerExtensionVersion compose_compiler_version | ||
| 40 | + } | ||
| 41 | + packagingOptions { | ||
| 42 | + resources { | ||
| 43 | + excludes += '/META-INF/{AL2.0,LGPL2.1}' | ||
| 44 | + } | ||
| 45 | + } | ||
| 46 | +} | ||
| 47 | + | ||
| 48 | +dependencies { | ||
| 49 | + | ||
| 50 | + implementation project(":sample-app-common") | ||
| 51 | + implementation "androidx.compose.ui:ui:$compose_version" | ||
| 52 | + implementation "androidx.compose.material:material:$compose_version" | ||
| 53 | + implementation "androidx.compose.ui:ui-tooling-preview:$compose_version" | ||
| 54 | + implementation "androidx.compose.runtime:runtime-livedata:$compose_version" | ||
| 55 | + implementation 'androidx.lifecycle:lifecycle-runtime-ktx:2.3.1' | ||
| 56 | + implementation 'androidx.activity:activity-compose:1.3.1' | ||
| 57 | + testImplementation 'junit:junit:4.13.2' | ||
| 58 | + androidTestImplementation 'androidx.test.ext:junit:1.1.3' | ||
| 59 | + androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' | ||
| 60 | + androidTestImplementation "androidx.compose.ui:ui-test-junit4:$compose_version" | ||
| 61 | + debugImplementation "androidx.compose.ui:ui-tooling:$compose_version" | ||
| 62 | + debugImplementation "androidx.compose.ui:ui-test-manifest:$compose_version" | ||
| 63 | +} |
sample-app-record-local/proguard-rules.pro
0 → 100644
| 1 | +# Add project specific ProGuard rules here. | ||
| 2 | +# You can control the set of applied configuration files using the | ||
| 3 | +# proguardFiles setting in build.gradle. | ||
| 4 | +# | ||
| 5 | +# For more details, see | ||
| 6 | +# http://developer.android.com/guide/developing/tools/proguard.html | ||
| 7 | + | ||
| 8 | +# If your project uses WebView with JS, uncomment the following | ||
| 9 | +# and specify the fully qualified class name to the JavaScript interface | ||
| 10 | +# class: | ||
| 11 | +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { | ||
| 12 | +# public *; | ||
| 13 | +#} | ||
| 14 | + | ||
| 15 | +# Uncomment this to preserve the line number information for | ||
| 16 | +# debugging stack traces. | ||
| 17 | +#-keepattributes SourceFile,LineNumberTable | ||
| 18 | + | ||
| 19 | +# If you keep the line number information, uncomment this to | ||
| 20 | +# hide the original source file name. | ||
| 21 | +#-renamesourcefileattribute SourceFile |
sample-app-record-local/src/androidTest/java/io/livekit/android/sample/record/ExampleInstrumentedTest.kt
0 → 100644
| 1 | +package io.livekit.android.sample.record | ||
| 2 | + | ||
| 3 | +import androidx.test.platform.app.InstrumentationRegistry | ||
| 4 | +import androidx.test.ext.junit.runners.AndroidJUnit4 | ||
| 5 | + | ||
| 6 | +import org.junit.Test | ||
| 7 | +import org.junit.runner.RunWith | ||
| 8 | + | ||
| 9 | +import org.junit.Assert.* | ||
| 10 | + | ||
| 11 | +/** | ||
| 12 | + * Instrumented test, which will execute on an Android device. | ||
| 13 | + * | ||
| 14 | + * See [testing documentation](http://d.android.com/tools/testing). | ||
| 15 | + */ | ||
| 16 | +@RunWith(AndroidJUnit4::class) | ||
| 17 | +class ExampleInstrumentedTest { | ||
| 18 | + @Test | ||
| 19 | + fun useAppContext() { | ||
| 20 | + // Context of the app under test. | ||
| 21 | + val appContext = InstrumentationRegistry.getInstrumentation().targetContext | ||
| 22 | + assertEquals("io.livekit.android.sample.record", appContext.packageName) | ||
| 23 | + } | ||
| 24 | +} |
sample-app-record-local/src/main/AndroidManifest.xml
0 → 100644
| 1 | +<?xml version="1.0" encoding="utf-8"?> | ||
| 2 | +<manifest xmlns:android="http://schemas.android.com/apk/res/android" | ||
| 3 | + package="io.livekit.android.sample.record"> | ||
| 4 | + | ||
| 5 | + <application | ||
| 6 | + android:allowBackup="true" | ||
| 7 | + android:icon="@mipmap/ic_launcher" | ||
| 8 | + android:label="@string/app_name" | ||
| 9 | + android:networkSecurityConfig="@xml/network_security_config" | ||
| 10 | + android:roundIcon="@mipmap/ic_launcher_round" | ||
| 11 | + android:supportsRtl="true" | ||
| 12 | + android:theme="@style/Theme.Livekitandroid"> | ||
| 13 | + <activity | ||
| 14 | + android:name=".MainActivity" | ||
| 15 | + android:exported="true" | ||
| 16 | + android:label="@string/app_name" | ||
| 17 | + android:theme="@style/Theme.Livekitandroid"> | ||
| 18 | + <intent-filter> | ||
| 19 | + <action android:name="android.intent.action.MAIN" /> | ||
| 20 | + | ||
| 21 | + <category android:name="android.intent.category.LAUNCHER" /> | ||
| 22 | + </intent-filter> | ||
| 23 | + </activity> | ||
| 24 | + </application> | ||
| 25 | + | ||
| 26 | +</manifest> |
sample-app-record-local/src/main/java/io/livekit/android/sample/record/MainActivity.kt
0 → 100644
| 1 | +package io.livekit.android.sample.record | ||
| 2 | + | ||
| 3 | +import android.os.Bundle | ||
| 4 | +import android.os.Environment | ||
| 5 | +import androidx.activity.ComponentActivity | ||
| 6 | +import androidx.activity.compose.setContent | ||
| 7 | +import androidx.compose.foundation.layout.Column | ||
| 8 | +import androidx.compose.foundation.layout.fillMaxSize | ||
| 9 | +import androidx.compose.material.Button | ||
| 10 | +import androidx.compose.material.MaterialTheme | ||
| 11 | +import androidx.compose.material.Surface | ||
| 12 | +import androidx.compose.material.Text | ||
| 13 | +import androidx.compose.runtime.getValue | ||
| 14 | +import androidx.compose.runtime.livedata.observeAsState | ||
| 15 | +import androidx.compose.ui.Modifier | ||
| 16 | +import androidx.lifecycle.MutableLiveData | ||
| 17 | +import androidx.lifecycle.lifecycleScope | ||
| 18 | +import io.livekit.android.LiveKit | ||
| 19 | +import io.livekit.android.LiveKitOverrides | ||
| 20 | +import io.livekit.android.room.Room | ||
| 21 | +import io.livekit.android.room.track.LocalVideoTrack | ||
| 22 | +import io.livekit.android.room.track.Track | ||
| 23 | +import io.livekit.android.sample.record.ui.theme.LivekitandroidTheme | ||
| 24 | +import io.livekit.android.sample.util.requestNeededPermissions | ||
| 25 | +import kotlinx.coroutines.launch | ||
| 26 | +import org.webrtc.EglBase | ||
| 27 | +import java.io.File | ||
| 28 | +import java.io.IOException | ||
| 29 | +import java.util.* | ||
| 30 | + | ||
| 31 | +class MainActivity : ComponentActivity() { | ||
| 32 | + lateinit var room: Room | ||
| 33 | + var videoFileRenderer: VideoFileRenderer? = null | ||
| 34 | + val connected = MutableLiveData(false) | ||
| 35 | + | ||
| 36 | + override fun onCreate(savedInstanceState: Bundle?) { | ||
| 37 | + super.onCreate(savedInstanceState) | ||
| 38 | + | ||
| 39 | + // Create Room object. | ||
| 40 | + room = LiveKit.create( | ||
| 41 | + appContext = applicationContext, | ||
| 42 | + overrides = LiveKitOverrides( | ||
| 43 | + javaAudioDeviceModuleCustomizer = { builder -> | ||
| 44 | + // Receive audio samples | ||
| 45 | + builder.setSamplesReadyCallback { samples -> | ||
| 46 | + videoFileRenderer?.onWebRtcAudioRecordSamplesReady(samples) | ||
| 47 | + } | ||
| 48 | + } | ||
| 49 | + ) | ||
| 50 | + ) | ||
| 51 | + | ||
| 52 | + setContent { | ||
| 53 | + LivekitandroidTheme { | ||
| 54 | + Surface(modifier = Modifier.fillMaxSize(), color = MaterialTheme.colors.background) { | ||
| 55 | + Column { | ||
| 56 | + val isConnected by connected.observeAsState(false) | ||
| 57 | + | ||
| 58 | + if (isConnected) { | ||
| 59 | + Text(text = "Connected!") | ||
| 60 | + Button(onClick = { disconnectRoom() }) { | ||
| 61 | + Text("Disconnect") | ||
| 62 | + } | ||
| 63 | + } else { | ||
| 64 | + Text(text = "Not Connected.") | ||
| 65 | + Button(onClick = { connectToRoom() }) { | ||
| 66 | + Text("Connect") | ||
| 67 | + } | ||
| 68 | + } | ||
| 69 | + } | ||
| 70 | + } | ||
| 71 | + } | ||
| 72 | + } | ||
| 73 | + | ||
| 74 | + requestNeededPermissions() | ||
| 75 | + } | ||
| 76 | + | ||
| 77 | + private fun connectToRoom() { | ||
| 78 | + | ||
| 79 | + val url = "wss://www.example.com" | ||
| 80 | + val token = "" | ||
| 81 | + | ||
| 82 | + lifecycleScope.launch { | ||
| 83 | + | ||
| 84 | + // Connect to server. | ||
| 85 | + room.connect( | ||
| 86 | + url, | ||
| 87 | + token, | ||
| 88 | + ) | ||
| 89 | + | ||
| 90 | + val localParticipant = room.localParticipant | ||
| 91 | + localParticipant.setMicrophoneEnabled(true) | ||
| 92 | + localParticipant.setCameraEnabled(true) | ||
| 93 | + | ||
| 94 | + // Create output file. | ||
| 95 | + val dir = getExternalFilesDir(Environment.DIRECTORY_MOVIES) | ||
| 96 | + val file = File(dir, "${Date().time}.mp4") | ||
| 97 | + if (!file.createNewFile()) { | ||
| 98 | + throw IOException() | ||
| 99 | + } | ||
| 100 | + | ||
| 101 | + // Setup video recording | ||
| 102 | + val videoFileRenderer = VideoFileRenderer( | ||
| 103 | + file.absolutePath, | ||
| 104 | + EglBase.create().eglBaseContext, | ||
| 105 | + true | ||
| 106 | + ) | ||
| 107 | + this@MainActivity.videoFileRenderer = videoFileRenderer | ||
| 108 | + | ||
| 109 | + // Attach to local video track. | ||
| 110 | + val track = localParticipant.getTrackPublication(Track.Source.CAMERA)?.track as LocalVideoTrack | ||
| 111 | + track.addRenderer(videoFileRenderer) | ||
| 112 | + | ||
| 113 | + connected.value = true | ||
| 114 | + } | ||
| 115 | + } | ||
| 116 | + | ||
| 117 | + fun disconnectRoom() { | ||
| 118 | + room.disconnect() | ||
| 119 | + videoFileRenderer?.release() | ||
| 120 | + videoFileRenderer = null | ||
| 121 | + connected.value = false | ||
| 122 | + } | ||
| 123 | +} | ||
| 124 | + |
sample-app-record-local/src/main/java/io/livekit/android/sample/record/VideoFileRenderer.java
0 → 100644
| 1 | +/** | ||
| 2 | + * MIT License | ||
| 3 | + * <p> | ||
| 4 | + * Copyright (c) 2018 湖北捷智云技术有限公司 | ||
| 5 | + * <p> | ||
| 6 | + * Permission is hereby granted, free of charge, to any person obtaining a copy | ||
| 7 | + * of this software and associated documentation files (the "Software"), to deal | ||
| 8 | + * in the Software without restriction, including without limitation the rights | ||
| 9 | + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 10 | + * copies of the Software, and to permit persons to whom the Software is | ||
| 11 | + * furnished to do so, subject to the following conditions: | ||
| 12 | + * <p> | ||
| 13 | + * The above copyright notice and this permission notice shall be included in all | ||
| 14 | + * copies or substantial portions of the Software. | ||
| 15 | + * <p> | ||
| 16 | + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 17 | + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 18 | + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
| 19 | + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
| 20 | + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
| 21 | + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||
| 22 | + * SOFTWARE. | ||
| 23 | + * <p> | ||
| 24 | + * From https://github.com/flutter-webrtc/flutter-webrtc/blob/main/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java | ||
| 25 | + */ | ||
| 26 | + | ||
| 27 | +package io.livekit.android.sample.record; | ||
| 28 | + | ||
| 29 | +import android.media.MediaCodec; | ||
| 30 | +import android.media.MediaCodecInfo; | ||
| 31 | +import android.media.MediaFormat; | ||
| 32 | +import android.media.MediaMuxer; | ||
| 33 | +import android.os.Handler; | ||
| 34 | +import android.os.HandlerThread; | ||
| 35 | +import android.util.Log; | ||
| 36 | +import android.view.Surface; | ||
| 37 | + | ||
| 38 | +import org.webrtc.EglBase; | ||
| 39 | +import org.webrtc.GlRectDrawer; | ||
| 40 | +import org.webrtc.VideoFrame; | ||
| 41 | +import org.webrtc.VideoFrameDrawer; | ||
| 42 | +import org.webrtc.VideoSink; | ||
| 43 | +import org.webrtc.audio.JavaAudioDeviceModule; | ||
| 44 | +import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; | ||
| 45 | + | ||
| 46 | +import java.io.IOException; | ||
| 47 | +import java.nio.ByteBuffer; | ||
| 48 | + | ||
| 49 | +public class VideoFileRenderer implements VideoSink, SamplesReadyCallback { | ||
| 50 | + private static final String TAG = "VideoFileRenderer"; | ||
| 51 | + private final HandlerThread renderThread; | ||
| 52 | + private final Handler renderThreadHandler; | ||
| 53 | + private final HandlerThread audioThread; | ||
| 54 | + private final Handler audioThreadHandler; | ||
| 55 | + private int outputFileWidth = -1; | ||
| 56 | + private int outputFileHeight = -1; | ||
| 57 | + private ByteBuffer[] encoderOutputBuffers; | ||
| 58 | + private ByteBuffer[] audioInputBuffers; | ||
| 59 | + private ByteBuffer[] audioOutputBuffers; | ||
| 60 | + private EglBase eglBase; | ||
| 61 | + private EglBase.Context sharedContext; | ||
| 62 | + private VideoFrameDrawer frameDrawer; | ||
| 63 | + | ||
| 64 | + // TODO: these ought to be configurable as well | ||
| 65 | + private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding | ||
| 66 | + private static final int FRAME_RATE = 30; // 30fps | ||
| 67 | + private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames | ||
| 68 | + | ||
| 69 | + private MediaMuxer mediaMuxer; | ||
| 70 | + private MediaCodec encoder; | ||
| 71 | + private MediaCodec.BufferInfo bufferInfo, audioBufferInfo; | ||
| 72 | + private int trackIndex = -1; | ||
| 73 | + private int audioTrackIndex; | ||
| 74 | + private boolean isRunning = true; | ||
| 75 | + private GlRectDrawer drawer; | ||
| 76 | + private Surface surface; | ||
| 77 | + private MediaCodec audioEncoder; | ||
| 78 | + | ||
| 79 | + VideoFileRenderer(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException { | ||
| 80 | + renderThread = new HandlerThread(TAG + "RenderThread"); | ||
| 81 | + renderThread.start(); | ||
| 82 | + renderThreadHandler = new Handler(renderThread.getLooper()); | ||
| 83 | + if (withAudio) { | ||
| 84 | + audioThread = new HandlerThread(TAG + "AudioThread"); | ||
| 85 | + audioThread.start(); | ||
| 86 | + audioThreadHandler = new Handler(audioThread.getLooper()); | ||
| 87 | + } else { | ||
| 88 | + audioThread = null; | ||
| 89 | + audioThreadHandler = null; | ||
| 90 | + } | ||
| 91 | + bufferInfo = new MediaCodec.BufferInfo(); | ||
| 92 | + this.sharedContext = sharedContext; | ||
| 93 | + | ||
| 94 | + // Create a MediaMuxer. We can't add the video track and start() the muxer here, | ||
| 95 | + // because our MediaFormat doesn't have the Magic Goodies. These can only be | ||
| 96 | + // obtained from the encoder after it has started processing data. | ||
| 97 | + mediaMuxer = new MediaMuxer(outputFile, | ||
| 98 | + MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); | ||
| 99 | + | ||
| 100 | + audioTrackIndex = withAudio ? -1 : 0; | ||
| 101 | + } | ||
| 102 | + | ||
| 103 | + private void initVideoEncoder() { | ||
| 104 | + MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight); | ||
| 105 | + | ||
| 106 | + // Set some properties. Failing to specify some of these can cause the MediaCodec | ||
| 107 | + // configure() call to throw an unhelpful exception. | ||
| 108 | + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, | ||
| 109 | + MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); | ||
| 110 | + format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000); | ||
| 111 | + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); | ||
| 112 | + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); | ||
| 113 | + | ||
| 114 | + // Create a MediaCodec encoder, and configure it with our format. Get a Surface | ||
| 115 | + // we can use for input and wrap it with a class that handles the EGL work. | ||
| 116 | + try { | ||
| 117 | + encoder = MediaCodec.createEncoderByType(MIME_TYPE); | ||
| 118 | + encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); | ||
| 119 | + renderThreadHandler.post(() -> { | ||
| 120 | + eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE); | ||
| 121 | + surface = encoder.createInputSurface(); | ||
| 122 | + eglBase.createSurface(surface); | ||
| 123 | + eglBase.makeCurrent(); | ||
| 124 | + drawer = new GlRectDrawer(); | ||
| 125 | + }); | ||
| 126 | + } catch (Exception e) { | ||
| 127 | + Log.wtf(TAG, e); | ||
| 128 | + } | ||
| 129 | + } | ||
| 130 | + | ||
| 131 | + @Override | ||
| 132 | + public void onFrame(VideoFrame frame) { | ||
| 133 | + frame.retain(); | ||
| 134 | + if (outputFileWidth == -1) { | ||
| 135 | + outputFileWidth = frame.getRotatedWidth(); | ||
| 136 | + outputFileHeight = frame.getRotatedHeight(); | ||
| 137 | + initVideoEncoder(); | ||
| 138 | + } | ||
| 139 | + renderThreadHandler.post(() -> renderFrameOnRenderThread(frame)); | ||
| 140 | + } | ||
| 141 | + | ||
| 142 | + private void renderFrameOnRenderThread(VideoFrame frame) { | ||
| 143 | + if (frameDrawer == null) { | ||
| 144 | + frameDrawer = new VideoFrameDrawer(); | ||
| 145 | + } | ||
| 146 | + frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight); | ||
| 147 | + frame.release(); | ||
| 148 | + drainEncoder(); | ||
| 149 | + eglBase.swapBuffers(); | ||
| 150 | + } | ||
| 151 | + | ||
| 152 | + /** | ||
| 153 | + * Release all resources. All already posted frames will be rendered first. | ||
| 154 | + */ | ||
| 155 | + void release() { | ||
| 156 | + isRunning = false; | ||
| 157 | + if (audioThreadHandler != null) | ||
| 158 | + audioThreadHandler.post(() -> { | ||
| 159 | + if (audioEncoder != null) { | ||
| 160 | + audioEncoder.stop(); | ||
| 161 | + audioEncoder.release(); | ||
| 162 | + } | ||
| 163 | + audioThread.quit(); | ||
| 164 | + audioThreadHandler.removeCallbacksAndMessages(null); | ||
| 165 | + }); | ||
| 166 | + renderThreadHandler.post(() -> { | ||
| 167 | + if (encoder != null) { | ||
| 168 | + encoder.stop(); | ||
| 169 | + encoder.release(); | ||
| 170 | + } | ||
| 171 | + eglBase.release(); | ||
| 172 | + mediaMuxer.stop(); | ||
| 173 | + mediaMuxer.release(); | ||
| 174 | + renderThread.quit(); | ||
| 175 | + renderThreadHandler.removeCallbacksAndMessages(null); | ||
| 176 | + }); | ||
| 177 | + } | ||
| 178 | + | ||
| 179 | + private boolean encoderStarted = false; | ||
| 180 | + private volatile boolean muxerStarted = false; | ||
| 181 | + private long videoFrameStart = 0; | ||
| 182 | + | ||
| 183 | + private void drainEncoder() { | ||
| 184 | + if (!encoderStarted) { | ||
| 185 | + encoder.start(); | ||
| 186 | + encoderOutputBuffers = encoder.getOutputBuffers(); | ||
| 187 | + encoderStarted = true; | ||
| 188 | + return; | ||
| 189 | + } | ||
| 190 | + while (true) { | ||
| 191 | + int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000); | ||
| 192 | + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { | ||
| 193 | + break; | ||
| 194 | + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { | ||
| 195 | + // not expected for an encoder | ||
| 196 | + encoderOutputBuffers = encoder.getOutputBuffers(); | ||
| 197 | + Log.e(TAG, "encoder output buffers changed"); | ||
| 198 | + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { | ||
| 199 | + // not expected for an encoder | ||
| 200 | + MediaFormat newFormat = encoder.getOutputFormat(); | ||
| 201 | + | ||
| 202 | + Log.e(TAG, "encoder output format changed: " + newFormat); | ||
| 203 | + trackIndex = mediaMuxer.addTrack(newFormat); | ||
| 204 | + if (audioTrackIndex != -1 && !muxerStarted) { | ||
| 205 | + mediaMuxer.start(); | ||
| 206 | + muxerStarted = true; | ||
| 207 | + } | ||
| 208 | + if (!muxerStarted) | ||
| 209 | + break; | ||
| 210 | + } else if (encoderStatus < 0) { | ||
| 211 | + Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); | ||
| 212 | + } else { // encoderStatus >= 0 | ||
| 213 | + try { | ||
| 214 | + ByteBuffer encodedData = encoderOutputBuffers[encoderStatus]; | ||
| 215 | + if (encodedData == null) { | ||
| 216 | + Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null"); | ||
| 217 | + break; | ||
| 218 | + } | ||
| 219 | + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. | ||
| 220 | + encodedData.position(bufferInfo.offset); | ||
| 221 | + encodedData.limit(bufferInfo.offset + bufferInfo.size); | ||
| 222 | + if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) { | ||
| 223 | + videoFrameStart = bufferInfo.presentationTimeUs; | ||
| 224 | + } | ||
| 225 | + bufferInfo.presentationTimeUs -= videoFrameStart; | ||
| 226 | + if (muxerStarted) | ||
| 227 | + mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo); | ||
| 228 | + isRunning = isRunning && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0; | ||
| 229 | + encoder.releaseOutputBuffer(encoderStatus, false); | ||
| 230 | + if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { | ||
| 231 | + break; | ||
| 232 | + } | ||
| 233 | + } catch (Exception e) { | ||
| 234 | + Log.wtf(TAG, e); | ||
| 235 | + break; | ||
| 236 | + } | ||
| 237 | + } | ||
| 238 | + } | ||
| 239 | + } | ||
| 240 | + | ||
| 241 | + private long presTime = 0L; | ||
| 242 | + | ||
| 243 | + private void drainAudio() { | ||
| 244 | + if (audioBufferInfo == null) | ||
| 245 | + audioBufferInfo = new MediaCodec.BufferInfo(); | ||
| 246 | + while (true) { | ||
| 247 | + int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000); | ||
| 248 | + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { | ||
| 249 | + break; | ||
| 250 | + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { | ||
| 251 | + // not expected for an encoder | ||
| 252 | + audioOutputBuffers = audioEncoder.getOutputBuffers(); | ||
| 253 | + Log.w(TAG, "encoder output buffers changed"); | ||
| 254 | + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { | ||
| 255 | + // not expected for an encoder | ||
| 256 | + MediaFormat newFormat = audioEncoder.getOutputFormat(); | ||
| 257 | + | ||
| 258 | + Log.w(TAG, "encoder output format changed: " + newFormat); | ||
| 259 | + audioTrackIndex = mediaMuxer.addTrack(newFormat); | ||
| 260 | + if (trackIndex != -1 && !muxerStarted) { | ||
| 261 | + mediaMuxer.start(); | ||
| 262 | + muxerStarted = true; | ||
| 263 | + } | ||
| 264 | + if (!muxerStarted) | ||
| 265 | + break; | ||
| 266 | + } else if (encoderStatus < 0) { | ||
| 267 | + Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); | ||
| 268 | + } else { // encoderStatus >= 0 | ||
| 269 | + try { | ||
| 270 | + ByteBuffer encodedData = audioOutputBuffers[encoderStatus]; | ||
| 271 | + if (encodedData == null) { | ||
| 272 | + Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null"); | ||
| 273 | + break; | ||
| 274 | + } | ||
| 275 | + // It's usually necessary to adjust the ByteBuffer values to match BufferInfo. | ||
| 276 | + encodedData.position(audioBufferInfo.offset); | ||
| 277 | + encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size); | ||
| 278 | + if (muxerStarted) | ||
| 279 | + mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo); | ||
| 280 | + isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0; | ||
| 281 | + audioEncoder.releaseOutputBuffer(encoderStatus, false); | ||
| 282 | + if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { | ||
| 283 | + break; | ||
| 284 | + } | ||
| 285 | + } catch (Exception e) { | ||
| 286 | + Log.wtf(TAG, e); | ||
| 287 | + break; | ||
| 288 | + } | ||
| 289 | + } | ||
| 290 | + } | ||
| 291 | + } | ||
| 292 | + | ||
| 293 | + @Override | ||
| 294 | + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { | ||
| 295 | + if (!isRunning) | ||
| 296 | + return; | ||
| 297 | + audioThreadHandler.post(() -> { | ||
| 298 | + if (audioEncoder == null) try { | ||
| 299 | + audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm"); | ||
| 300 | + MediaFormat format = new MediaFormat(); | ||
| 301 | + format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm"); | ||
| 302 | + format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount()); | ||
| 303 | + format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate()); | ||
| 304 | + format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024); | ||
| 305 | + format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); | ||
| 306 | + audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); | ||
| 307 | + audioEncoder.start(); | ||
| 308 | + audioInputBuffers = audioEncoder.getInputBuffers(); | ||
| 309 | + audioOutputBuffers = audioEncoder.getOutputBuffers(); | ||
| 310 | + } catch (IOException exception) { | ||
| 311 | + Log.wtf(TAG, exception); | ||
| 312 | + } | ||
| 313 | + int bufferIndex = audioEncoder.dequeueInputBuffer(0); | ||
| 314 | + if (bufferIndex >= 0) { | ||
| 315 | + ByteBuffer buffer = audioInputBuffers[bufferIndex]; | ||
| 316 | + buffer.clear(); | ||
| 317 | + byte[] data = audioSamples.getData(); | ||
| 318 | + buffer.put(data); | ||
| 319 | + audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0); | ||
| 320 | + presTime += data.length * 125 / 12; // 1000000 microseconds / 48000hz / 2 bytes | ||
| 321 | + } | ||
| 322 | + drainAudio(); | ||
| 323 | + }); | ||
| 324 | + } | ||
| 325 | + | ||
| 326 | +} |
sample-app-record-local/src/main/java/io/livekit/android/sample/record/ui/theme/Shape.kt
0 → 100644
| 1 | +package io.livekit.android.sample.record.ui.theme | ||
| 2 | + | ||
| 3 | +import androidx.compose.foundation.shape.RoundedCornerShape | ||
| 4 | +import androidx.compose.material.Shapes | ||
| 5 | +import androidx.compose.ui.unit.dp | ||
| 6 | + | ||
| 7 | +val Shapes = Shapes( | ||
| 8 | + small = RoundedCornerShape(4.dp), | ||
| 9 | + medium = RoundedCornerShape(4.dp), | ||
| 10 | + large = RoundedCornerShape(0.dp) | ||
| 11 | +) |
sample-app-record-local/src/main/java/io/livekit/android/sample/record/ui/theme/Theme.kt
0 → 100644
| 1 | +package io.livekit.android.sample.record.ui.theme | ||
| 2 | + | ||
| 3 | +import androidx.compose.foundation.isSystemInDarkTheme | ||
| 4 | +import androidx.compose.material.MaterialTheme | ||
| 5 | +import androidx.compose.material.darkColors | ||
| 6 | +import androidx.compose.material.lightColors | ||
| 7 | +import androidx.compose.runtime.Composable | ||
| 8 | + | ||
| 9 | +private val DarkColorPalette = darkColors( | ||
| 10 | + primary = Purple200, | ||
| 11 | + primaryVariant = Purple700, | ||
| 12 | + secondary = Teal200 | ||
| 13 | +) | ||
| 14 | + | ||
| 15 | +private val LightColorPalette = lightColors( | ||
| 16 | + primary = Purple500, | ||
| 17 | + primaryVariant = Purple700, | ||
| 18 | + secondary = Teal200 | ||
| 19 | + | ||
| 20 | + /* Other default colors to override | ||
| 21 | + background = Color.White, | ||
| 22 | + surface = Color.White, | ||
| 23 | + onPrimary = Color.White, | ||
| 24 | + onSecondary = Color.Black, | ||
| 25 | + onBackground = Color.Black, | ||
| 26 | + onSurface = Color.Black, | ||
| 27 | + */ | ||
| 28 | +) | ||
| 29 | + | ||
| 30 | +@Composable | ||
| 31 | +fun LivekitandroidTheme(darkTheme: Boolean = isSystemInDarkTheme(), content: @Composable () -> Unit) { | ||
| 32 | + val colors = if (darkTheme) { | ||
| 33 | + DarkColorPalette | ||
| 34 | + } else { | ||
| 35 | + LightColorPalette | ||
| 36 | + } | ||
| 37 | + | ||
| 38 | + MaterialTheme( | ||
| 39 | + colors = colors, | ||
| 40 | + typography = Typography, | ||
| 41 | + shapes = Shapes, | ||
| 42 | + content = content | ||
| 43 | + ) | ||
| 44 | +} |
sample-app-record-local/src/main/java/io/livekit/android/sample/record/ui/theme/Type.kt
0 → 100644
| 1 | +package io.livekit.android.sample.record.ui.theme | ||
| 2 | + | ||
| 3 | +import androidx.compose.material.Typography | ||
| 4 | +import androidx.compose.ui.text.TextStyle | ||
| 5 | +import androidx.compose.ui.text.font.FontFamily | ||
| 6 | +import androidx.compose.ui.text.font.FontWeight | ||
| 7 | +import androidx.compose.ui.unit.sp | ||
| 8 | + | ||
| 9 | +// Set of Material typography styles to start with | ||
| 10 | +val Typography = Typography( | ||
| 11 | + body1 = TextStyle( | ||
| 12 | + fontFamily = FontFamily.Default, | ||
| 13 | + fontWeight = FontWeight.Normal, | ||
| 14 | + fontSize = 16.sp | ||
| 15 | + ) | ||
| 16 | + /* Other default text styles to override | ||
| 17 | + button = TextStyle( | ||
| 18 | + fontFamily = FontFamily.Default, | ||
| 19 | + fontWeight = FontWeight.W500, | ||
| 20 | + fontSize = 14.sp | ||
| 21 | + ), | ||
| 22 | + caption = TextStyle( | ||
| 23 | + fontFamily = FontFamily.Default, | ||
| 24 | + fontWeight = FontWeight.Normal, | ||
| 25 | + fontSize = 12.sp | ||
| 26 | + ) | ||
| 27 | + */ | ||
| 28 | +) |
| 1 | +<vector xmlns:android="http://schemas.android.com/apk/res/android" | ||
| 2 | + xmlns:aapt="http://schemas.android.com/aapt" | ||
| 3 | + android:width="108dp" | ||
| 4 | + android:height="108dp" | ||
| 5 | + android:viewportWidth="108" | ||
| 6 | + android:viewportHeight="108"> | ||
| 7 | + <path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z"> | ||
| 8 | + <aapt:attr name="android:fillColor"> | ||
| 9 | + <gradient | ||
| 10 | + android:endX="85.84757" | ||
| 11 | + android:endY="92.4963" | ||
| 12 | + android:startX="42.9492" | ||
| 13 | + android:startY="49.59793" | ||
| 14 | + android:type="linear"> | ||
| 15 | + <item | ||
| 16 | + android:color="#44000000" | ||
| 17 | + android:offset="0.0" /> | ||
| 18 | + <item | ||
| 19 | + android:color="#00000000" | ||
| 20 | + android:offset="1.0" /> | ||
| 21 | + </gradient> | ||
| 22 | + </aapt:attr> | ||
| 23 | + </path> | ||
| 24 | + <path | ||
| 25 | + android:fillColor="#FFFFFF" | ||
| 26 | + android:fillType="nonZero" | ||
| 27 | + android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z" | ||
| 28 | + android:strokeWidth="1" | ||
| 29 | + android:strokeColor="#00000000" /> | ||
| 30 | +</vector> |
| 1 | +<?xml version="1.0" encoding="utf-8"?> | ||
| 2 | +<vector xmlns:android="http://schemas.android.com/apk/res/android" | ||
| 3 | + android:width="108dp" | ||
| 4 | + android:height="108dp" | ||
| 5 | + android:viewportWidth="108" | ||
| 6 | + android:viewportHeight="108"> | ||
| 7 | + <path | ||
| 8 | + android:fillColor="#3DDC84" | ||
| 9 | + android:pathData="M0,0h108v108h-108z" /> | ||
| 10 | + <path | ||
| 11 | + android:fillColor="#00000000" | ||
| 12 | + android:pathData="M9,0L9,108" | ||
| 13 | + android:strokeWidth="0.8" | ||
| 14 | + android:strokeColor="#33FFFFFF" /> | ||
| 15 | + <path | ||
| 16 | + android:fillColor="#00000000" | ||
| 17 | + android:pathData="M19,0L19,108" | ||
| 18 | + android:strokeWidth="0.8" | ||
| 19 | + android:strokeColor="#33FFFFFF" /> | ||
| 20 | + <path | ||
| 21 | + android:fillColor="#00000000" | ||
| 22 | + android:pathData="M29,0L29,108" | ||
| 23 | + android:strokeWidth="0.8" | ||
| 24 | + android:strokeColor="#33FFFFFF" /> | ||
| 25 | + <path | ||
| 26 | + android:fillColor="#00000000" | ||
| 27 | + android:pathData="M39,0L39,108" | ||
| 28 | + android:strokeWidth="0.8" | ||
| 29 | + android:strokeColor="#33FFFFFF" /> | ||
| 30 | + <path | ||
| 31 | + android:fillColor="#00000000" | ||
| 32 | + android:pathData="M49,0L49,108" | ||
| 33 | + android:strokeWidth="0.8" | ||
| 34 | + android:strokeColor="#33FFFFFF" /> | ||
| 35 | + <path | ||
| 36 | + android:fillColor="#00000000" | ||
| 37 | + android:pathData="M59,0L59,108" | ||
| 38 | + android:strokeWidth="0.8" | ||
| 39 | + android:strokeColor="#33FFFFFF" /> | ||
| 40 | + <path | ||
| 41 | + android:fillColor="#00000000" | ||
| 42 | + android:pathData="M69,0L69,108" | ||
| 43 | + android:strokeWidth="0.8" | ||
| 44 | + android:strokeColor="#33FFFFFF" /> | ||
| 45 | + <path | ||
| 46 | + android:fillColor="#00000000" | ||
| 47 | + android:pathData="M79,0L79,108" | ||
| 48 | + android:strokeWidth="0.8" | ||
| 49 | + android:strokeColor="#33FFFFFF" /> | ||
| 50 | + <path | ||
| 51 | + android:fillColor="#00000000" | ||
| 52 | + android:pathData="M89,0L89,108" | ||
| 53 | + android:strokeWidth="0.8" | ||
| 54 | + android:strokeColor="#33FFFFFF" /> | ||
| 55 | + <path | ||
| 56 | + android:fillColor="#00000000" | ||
| 57 | + android:pathData="M99,0L99,108" | ||
| 58 | + android:strokeWidth="0.8" | ||
| 59 | + android:strokeColor="#33FFFFFF" /> | ||
| 60 | + <path | ||
| 61 | + android:fillColor="#00000000" | ||
| 62 | + android:pathData="M0,9L108,9" | ||
| 63 | + android:strokeWidth="0.8" | ||
| 64 | + android:strokeColor="#33FFFFFF" /> | ||
| 65 | + <path | ||
| 66 | + android:fillColor="#00000000" | ||
| 67 | + android:pathData="M0,19L108,19" | ||
| 68 | + android:strokeWidth="0.8" | ||
| 69 | + android:strokeColor="#33FFFFFF" /> | ||
| 70 | + <path | ||
| 71 | + android:fillColor="#00000000" | ||
| 72 | + android:pathData="M0,29L108,29" | ||
| 73 | + android:strokeWidth="0.8" | ||
| 74 | + android:strokeColor="#33FFFFFF" /> | ||
| 75 | + <path | ||
| 76 | + android:fillColor="#00000000" | ||
| 77 | + android:pathData="M0,39L108,39" | ||
| 78 | + android:strokeWidth="0.8" | ||
| 79 | + android:strokeColor="#33FFFFFF" /> | ||
| 80 | + <path | ||
| 81 | + android:fillColor="#00000000" | ||
| 82 | + android:pathData="M0,49L108,49" | ||
| 83 | + android:strokeWidth="0.8" | ||
| 84 | + android:strokeColor="#33FFFFFF" /> | ||
| 85 | + <path | ||
| 86 | + android:fillColor="#00000000" | ||
| 87 | + android:pathData="M0,59L108,59" | ||
| 88 | + android:strokeWidth="0.8" | ||
| 89 | + android:strokeColor="#33FFFFFF" /> | ||
| 90 | + <path | ||
| 91 | + android:fillColor="#00000000" | ||
| 92 | + android:pathData="M0,69L108,69" | ||
| 93 | + android:strokeWidth="0.8" | ||
| 94 | + android:strokeColor="#33FFFFFF" /> | ||
| 95 | + <path | ||
| 96 | + android:fillColor="#00000000" | ||
| 97 | + android:pathData="M0,79L108,79" | ||
| 98 | + android:strokeWidth="0.8" | ||
| 99 | + android:strokeColor="#33FFFFFF" /> | ||
| 100 | + <path | ||
| 101 | + android:fillColor="#00000000" | ||
| 102 | + android:pathData="M0,89L108,89" | ||
| 103 | + android:strokeWidth="0.8" | ||
| 104 | + android:strokeColor="#33FFFFFF" /> | ||
| 105 | + <path | ||
| 106 | + android:fillColor="#00000000" | ||
| 107 | + android:pathData="M0,99L108,99" | ||
| 108 | + android:strokeWidth="0.8" | ||
| 109 | + android:strokeColor="#33FFFFFF" /> | ||
| 110 | + <path | ||
| 111 | + android:fillColor="#00000000" | ||
| 112 | + android:pathData="M19,29L89,29" | ||
| 113 | + android:strokeWidth="0.8" | ||
| 114 | + android:strokeColor="#33FFFFFF" /> | ||
| 115 | + <path | ||
| 116 | + android:fillColor="#00000000" | ||
| 117 | + android:pathData="M19,39L89,39" | ||
| 118 | + android:strokeWidth="0.8" | ||
| 119 | + android:strokeColor="#33FFFFFF" /> | ||
| 120 | + <path | ||
| 121 | + android:fillColor="#00000000" | ||
| 122 | + android:pathData="M19,49L89,49" | ||
| 123 | + android:strokeWidth="0.8" | ||
| 124 | + android:strokeColor="#33FFFFFF" /> | ||
| 125 | + <path | ||
| 126 | + android:fillColor="#00000000" | ||
| 127 | + android:pathData="M19,59L89,59" | ||
| 128 | + android:strokeWidth="0.8" | ||
| 129 | + android:strokeColor="#33FFFFFF" /> | ||
| 130 | + <path | ||
| 131 | + android:fillColor="#00000000" | ||
| 132 | + android:pathData="M19,69L89,69" | ||
| 133 | + android:strokeWidth="0.8" | ||
| 134 | + android:strokeColor="#33FFFFFF" /> | ||
| 135 | + <path | ||
| 136 | + android:fillColor="#00000000" | ||
| 137 | + android:pathData="M19,79L89,79" | ||
| 138 | + android:strokeWidth="0.8" | ||
| 139 | + android:strokeColor="#33FFFFFF" /> | ||
| 140 | + <path | ||
| 141 | + android:fillColor="#00000000" | ||
| 142 | + android:pathData="M29,19L29,89" | ||
| 143 | + android:strokeWidth="0.8" | ||
| 144 | + android:strokeColor="#33FFFFFF" /> | ||
| 145 | + <path | ||
| 146 | + android:fillColor="#00000000" | ||
| 147 | + android:pathData="M39,19L39,89" | ||
| 148 | + android:strokeWidth="0.8" | ||
| 149 | + android:strokeColor="#33FFFFFF" /> | ||
| 150 | + <path | ||
| 151 | + android:fillColor="#00000000" | ||
| 152 | + android:pathData="M49,19L49,89" | ||
| 153 | + android:strokeWidth="0.8" | ||
| 154 | + android:strokeColor="#33FFFFFF" /> | ||
| 155 | + <path | ||
| 156 | + android:fillColor="#00000000" | ||
| 157 | + android:pathData="M59,19L59,89" | ||
| 158 | + android:strokeWidth="0.8" | ||
| 159 | + android:strokeColor="#33FFFFFF" /> | ||
| 160 | + <path | ||
| 161 | + android:fillColor="#00000000" | ||
| 162 | + android:pathData="M69,19L69,89" | ||
| 163 | + android:strokeWidth="0.8" | ||
| 164 | + android:strokeColor="#33FFFFFF" /> | ||
| 165 | + <path | ||
| 166 | + android:fillColor="#00000000" | ||
| 167 | + android:pathData="M79,19L79,89" | ||
| 168 | + android:strokeWidth="0.8" | ||
| 169 | + android:strokeColor="#33FFFFFF" /> | ||
| 170 | +</vector> |
(Preview not available for this file type — 10 binary files.)
sample-app-record-local/src/main/res/values/colors.xml
0 → 100644
| 1 | +<?xml version="1.0" encoding="utf-8"?> | ||
| 2 | +<resources> | ||
| 3 | + <color name="purple_200">#FFBB86FC</color> | ||
| 4 | + <color name="purple_500">#FF6200EE</color> | ||
| 5 | + <color name="purple_700">#FF3700B3</color> | ||
| 6 | + <color name="teal_200">#FF03DAC5</color> | ||
| 7 | + <color name="teal_700">#FF018786</color> | ||
| 8 | + <color name="black">#FF000000</color> | ||
| 9 | + <color name="white">#FFFFFFFF</color> | ||
| 10 | +</resources> |
sample-app-record-local/src/test/java/io/livekit/android/sample/record/ExampleUnitTest.kt
0 → 100644
| 1 | +package io.livekit.android.sample.record | ||
| 2 | + | ||
| 3 | +import org.junit.Test | ||
| 4 | + | ||
| 5 | +import org.junit.Assert.* | ||
| 6 | + | ||
| 7 | +/** | ||
| 8 | + * Example local unit test, which will execute on the development machine (host). | ||
| 9 | + * | ||
| 10 | + * See [testing documentation](http://d.android.com/tools/testing). | ||
| 11 | + */ | ||
| 12 | +class ExampleUnitTest { | ||
| 13 | + @Test | ||
| 14 | + fun addition_isCorrect() { | ||
| 15 | + assertEquals(4, 2 + 2) | ||
| 16 | + } | ||
| 17 | +} |
| 1 | -<?xml version="1.0" encoding="utf-8"?> | ||
| 2 | -<network-security-config> | ||
| 3 | - <domain-config cleartextTrafficPermitted="true"> | ||
| 4 | - <domain includeSubdomains="true">example.com</domain> | ||
| 5 | - </domain-config> | ||
| 6 | - | ||
| 7 | - <base-config cleartextTrafficPermitted="true"> | ||
| 8 | - <trust-anchors> | ||
| 9 | - <certificates src="system" /> | ||
| 10 | - </trust-anchors> | ||
| 11 | - </base-config> | ||
| 12 | -</network-security-config> |
settings.gradle
@@ -9,3 +9,4 @@ include ':sample-app-common'
 include ':livekit-lint'
 include ':video-encode-decode-test'
 include ':sample-app-basic'
+include ':sample-app-record-local'