diff --git a/.github/ci/build/build_windows.bat b/.github/ci/build/build_windows.bat index 7bdbb98f3..4c83f4263 100644 --- a/.github/ci/build/build_windows.bat +++ b/.github/ci/build/build_windows.bat @@ -66,14 +66,11 @@ echo zip_name: %zip_name% dir -echo off -REM curl --silent %sdk_url% ./ -python %WORKSPACE%\\artifactory_utils.py --action=download_file --file=%sdk_url% +curl %sdk_url% -o %zip_name% +REM python %WORKSPACE%\\artifactory_utils.py --action=download_file --file=%sdk_url% 7z x ./%zip_name% -y -echo on dir - rmdir /S /Q Agora_Native_SDK_for_Windows_FULL\demo del /F /Q Agora_Native_SDK_for_Windows_FULL\commits del /F /Q Agora_Native_SDK_for_Windows_FULL\package_size_report.txt diff --git a/.github/ci/build/modify_podfile.py b/.github/ci/build/modify_podfile.py index 1f63ee796..3cf89c2bd 100644 --- a/.github/ci/build/modify_podfile.py +++ b/.github/ci/build/modify_podfile.py @@ -5,15 +5,17 @@ def modfiy(path): contents = [] for num, line in enumerate(file): if "pod 'Agora" in line: - line = '\t'+"pod 'sdk', :path => '../../sdk.podspec'" + line = '\t'+"pod 'sdk', :path => '../../sdk.podspec'" + "\n" elif "pod 'sdk" in line: line = "" + elif 'sh .download_script' in line: + line = line.replace('true', 'false') + "\n" contents.append(line) file.close() with open(path, 'w', encoding='utf-8') as fw: for content in contents: - fw.write(content + "\n") + fw.write(content) fw.close() diff --git a/.github/workflows/gitee-sync-shell.sh b/.github/workflows/gitee-sync-shell.sh index 1a7f4f437..c5728c2b9 100755 --- a/.github/workflows/gitee-sync-shell.sh +++ b/.github/workflows/gitee-sync-shell.sh @@ -13,6 +13,19 @@ sed -ie "s#https://services.gradle.org/distributions#https://mirrors.cloud.tence git add Android/APIExample/settings.gradle Android/APIExample/gradle/wrapper/gradle-wrapper.properties Android/APIExample-Audio/settings.gradle Android/APIExample-Audio/gradle/wrapper/gradle-wrapper.properties git commit -m '[Android] gitee sync >> use china repos.' +# change iOS Podfile to china repos +python3 .github/workflows/modify_podfile.py iOS/APIExample/Podfile +python3 .github/workflows/modify_podfile.py iOS/APIExample-Audio/Podfile +python3 .github/workflows/modify_podfile.py iOS/APIExample-OC/Podfile +python3 .github/workflows/modify_podfile.py macOS/Podfile + +# sed -ie '1s#^#source "https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git"\n#' iOS/APIExample/Podfile +# sed -ie '1s#^#source "https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git"\n#' iOS/APIExample-Audio/Podfile +# sed -ie '1s#^#source "https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git"\n#' iOS/APIExample-OC/Podfile +# sed -ie '1s#^#source "https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git"\n#' macOS/Podfile +git add iOS/APIExample/Podfile iOS/APIExample-Audio/Podfile iOS/APIExample-OC/Podfile macOS/Podfile +git commit -m '[iOS] gitee sync >> use china repos.' 
+ git branch git status git push gitee diff --git a/.github/workflows/gitee-sync.yml b/.github/workflows/gitee-sync.yml index 02c9462fe..9353bc25b 100644 --- a/.github/workflows/gitee-sync.yml +++ b/.github/workflows/gitee-sync.yml @@ -14,7 +14,7 @@ jobs: if: github.actor != 'dependabot[bot]' steps: - name: Gitee sync repo - uses: xgfd3/hub-mirror-action@v1.0 + uses: xgfd3/hub-mirror-action@v1.4 with: src: github/AgoraIO dst: gitee/agoraio-community @@ -25,4 +25,5 @@ jobs: dst_token: ${{ secrets.GITEE_PRIVATE_TOKEN }} force_update: true account_type: org - shell_path: ./.github/workflows/gitee-sync-shell.sh \ No newline at end of file + shell_path: ./.github/workflows/gitee-sync-shell.sh + github_ref: ${{ github.ref }} diff --git a/.github/workflows/modify_podfile.py b/.github/workflows/modify_podfile.py new file mode 100644 index 000000000..0bd6fb107 --- /dev/null +++ b/.github/workflows/modify_podfile.py @@ -0,0 +1,34 @@ +import os, sys + +def modfiy(path): + with open(path, 'r', encoding='utf-8') as file: + contents = [] + for num, line in enumerate(file): + if "pod 'Agora" in line: + line = '\t'+"pod 'sdk', :path => 'sdk.podspec'" + "\n" + elif "pod 'sdk" in line: + line = "" + elif "pod 'Floaty" in line: + line = '\t'+"pod 'Floaty', :git => 'https://gitee.com/shengwang-dependencies/Floaty.git'" + "\n" + elif "pod 'AGEVideoLayout" in line: + line = '\t'+"pod 'AGEVideoLayout', :git => 'https://gitee.com/shengwang-dependencies/AGEVideoLayout.git'" + "\n" + elif "pod 'CocoaAsyncSocket" in line: + line = '\t'+"pod 'CocoaAsyncSocket', :git => 'https://gitee.com/shengwang-dependencies/CocoaAsyncSocket.git'" + "\n" + elif "pod 'SwiftLint" in line: + line = '\t'+"pod 'SwiftLint', :git => 'https://gitee.com/shengwang-dependencies/SwiftLint', :commit => '1067113303c134ef472a71b30d21e5350de7889d'" + "\n" + elif "pod 'ijkplayer" in line: + line = '\t'+"pod 'ijkplayer', :path => 'ijkplayer.podspec'" + "\n" + elif 'sh .download_script' in line: + line = line.replace('#', '').replace('false', 'true') + contents.append(line) + file.close() + + with open(path, 'w', encoding='utf-8') as fw: + for content in contents: + fw.write(content) + fw.close() + + +if __name__ == '__main__': + path = sys.argv[1:][0] + modfiy(path.strip()) \ No newline at end of file diff --git a/Android/APIExample-Audio/app/build.gradle b/Android/APIExample-Audio/app/build.gradle index c61ffa95d..73679fdca 100644 --- a/Android/APIExample-Audio/app/build.gradle +++ b/Android/APIExample-Audio/app/build.gradle @@ -48,7 +48,7 @@ dependencies { implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar']) } else{ - def agora_sdk_version = "4.3.0" + def agora_sdk_version = "4.3.1" // case 1: full single lib with voice only implementation "io.agora.rtc:voice-sdk:${agora_sdk_version}" // case 2: partial libs with voice only diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/MainActivity.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/MainActivity.java index 48889e115..5f59203cc 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/MainActivity.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/MainActivity.java @@ -30,9 +30,8 @@ protected void onCreate(Bundle savedInstanceState) { @Override public boolean onSupportNavigateUp() { - NavController navController = Navigation.findNavController(this, R.id.nav_host_fragment); - return NavigationUI.navigateUp(navController, appBarConfiguration) - || super.onSupportNavigateUp(); + 
onBackPressed(); + return false; } @Override diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java index 3dde5fc35..68bddbbf3 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java @@ -36,7 +36,7 @@ public void handleOnBackPressed() { public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); handler = new Handler(Looper.getMainLooper()); - requireActivity().getOnBackPressedDispatcher().addCallback(onBackPressedCallback); + requireActivity().getOnBackPressedDispatcher().addCallback(this, onBackPressedCallback); } @Override diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java index 99e7d1561..f097b2b49 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java @@ -334,7 +334,7 @@ public boolean onEarMonitoringAudioFrame(int type, int samplesPerChannel, int by } @Override - public boolean onPlaybackAudioFrameBeforeMixing(String channel, int uid, int audioFrameType, int samples, int bytesPerSample, int channels, int samplesPerSec, ByteBuffer byteBuffer, long renderTimeMs, int bufferLength) { + public boolean onPlaybackAudioFrameBeforeMixing(String channelId, int uid, int type, int samplesPerChannel, int bytesPerSample, int channels, int samplesPerSec, ByteBuffer buffer, long renderTimeMs, int avsync_type, int rtpTimestamp) { return false; } diff --git a/Android/APIExample-Audio/gradle/wrapper/gradle-wrapper.properties b/Android/APIExample-Audio/gradle/wrapper/gradle-wrapper.properties index 061df5425..00a88fb69 100644 --- a/Android/APIExample-Audio/gradle/wrapper/gradle-wrapper.properties +++ b/Android/APIExample-Audio/gradle/wrapper/gradle-wrapper.properties @@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https://services.gradle.org/distributions/gradle-7.3.3-bin.zip +distributionUrl=https://mirrors.cloud.tencent.com/gradle/gradle-7.3.3-bin.zip diff --git a/Android/APIExample-Audio/settings.gradle b/Android/APIExample-Audio/settings.gradle index 7aa1c9aba..4b5c32e8d 100644 --- a/Android/APIExample-Audio/settings.gradle +++ b/Android/APIExample-Audio/settings.gradle @@ -1,5 +1,6 @@ pluginManagement { repositories { + maven { url "https://maven.aliyun.com/repository/public" } google() mavenCentral() gradlePluginPortal() @@ -8,6 +9,7 @@ pluginManagement { dependencyResolutionManagement { repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) repositories { + maven { url "https://maven.aliyun.com/repository/public" } google() mavenCentral() maven { url "https://jitpack.io" } diff --git a/Android/APIExample/app/build.gradle b/Android/APIExample/app/build.gradle index bec40f6a2..2b347fffd 100644 --- a/Android/APIExample/app/build.gradle +++ b/Android/APIExample/app/build.gradle @@ -1,9 +1,12 @@ apply plugin: 'com.android.application' apply plugin: 'kotlin-android' apply from: "${rootDir.absolutePath}/git-hooks.gradle" +apply from: 
'vendors.gradle' +def agoraSdkVersion = "4.3.1" def localSdkPath= "${rootProject.projectDir.absolutePath}/../../sdk" + android { compileSdkVersion 32 buildToolsVersion "32.0.0" @@ -54,6 +57,14 @@ android { viewBinding true } + applicationVariants.all { + variant -> + variant.outputs.all { output -> + outputFileName = new File(rootProject.name + + "_" + agoraSdkVersion + + "_" + new Date().format("yyyyMMddHHmm") + ".apk") + } + } } dependencies { @@ -62,26 +73,26 @@ dependencies { if(new File("${localSdkPath}").exists()){ implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar']) } - else{ - def agora_sdk_version = "4.3.0" + else { + // case 1: full libs - implementation "io.agora.rtc:full-sdk:${agora_sdk_version}" - implementation "io.agora.rtc:full-screen-sharing:${agora_sdk_version}" + implementation "io.agora.rtc:full-sdk:${agoraSdkVersion}" + implementation "io.agora.rtc:full-screen-sharing:${agoraSdkVersion}" // case 2: partial libs - // implementation "io.agora.rtc:full-rtc-basic:${agora_sdk_version}" - // implementation "io.agora.rtc:ains:${agora_sdk_version}" - // implementation "io.agora.rtc:full-content-inspect:${agora_sdk_version}" - // implementation "io.agora.rtc:full-virtual-background:${agora_sdk_version}" - // implementation "io.agora.rtc:full-super-resolution:${agora_sdk_version}" - // implementation "io.agora.rtc:spatial-audio:${agora_sdk_version}" - // implementation "io.agora.rtc:audio-beauty:${agora_sdk_version}" - // implementation "io.agora.rtc:clear-vision:${agora_sdk_version}" - // implementation "io.agora.rtc:pvc:${agora_sdk_version}" - // implementation "io.agora.rtc:screen-capture:${agora_sdk_version}" - // implementation "io.agora.rtc:aiaec:${agora_sdk_version}" - // implementation "io.agora.rtc:drm-loader:${agora_sdk_version}" - // implementation "io.agora.rtc:drm:${agora_sdk_version}" - // implementation "io.agora.rtc:full-vqa:${agora_sdk_version}" + // implementation "io.agora.rtc:full-rtc-basic:${agoraSdkVersion}" + // implementation "io.agora.rtc:ains:${agoraSdkVersion}" + // implementation "io.agora.rtc:full-content-inspect:${agoraSdkVersion}" + // implementation "io.agora.rtc:full-virtual-background:${agoraSdkVersion}" + // implementation "io.agora.rtc:full-super-resolution:${agoraSdkVersion}" + // implementation "io.agora.rtc:spatial-audio:${agoraSdkVersion}" + // implementation "io.agora.rtc:audio-beauty:${agoraSdkVersion}" + // implementation "io.agora.rtc:clear-vision:${agoraSdkVersion}" + // implementation "io.agora.rtc:pvc:${agoraSdkVersion}" + // implementation "io.agora.rtc:screen-capture:${agoraSdkVersion}" + // implementation "io.agora.rtc:aiaec:${agoraSdkVersion}" + // implementation "io.agora.rtc:drm-loader:${agoraSdkVersion}" + // implementation "io.agora.rtc:drm:${agoraSdkVersion}" + // implementation "io.agora.rtc:full-vqa:${agoraSdkVersion}" } @@ -111,8 +122,8 @@ dependencies { implementation "com.squareup.okhttp3:okhttp:4.10.0" implementation "com.squareup.okhttp3:logging-interceptor:4.10.0" - implementation 'com.faceunity:core:8.3.0' - implementation 'com.faceunity:model:8.3.0' + implementation 'com.faceunity:core:8.7.0' + implementation 'com.faceunity:model:8.7.0' implementation 'tv.danmaku.ijk.media:ijkplayer-java:0.8.8' implementation 'tv.danmaku.ijk.media:ijkplayer-armv7a:0.8.8' diff --git a/Android/APIExample/app/libs/HardwareBuffer-release.aar b/Android/APIExample/app/libs/HardwareBuffer-release.aar deleted file mode 100644 index 4f1066a79..000000000 Binary files 
a/Android/APIExample/app/libs/HardwareBuffer-release.aar and /dev/null differ diff --git a/Android/APIExample/app/libs/STMobileJNI-release.aar b/Android/APIExample/app/libs/STMobileJNI-release.aar deleted file mode 100644 index 9760d01ca..000000000 Binary files a/Android/APIExample/app/libs/STMobileJNI-release.aar and /dev/null differ diff --git a/Android/APIExample/app/libs/SenseArSourceManager-release.aar b/Android/APIExample/app/libs/SenseArSourceManager-release.aar deleted file mode 100644 index 759591b2c..000000000 Binary files a/Android/APIExample/app/libs/SenseArSourceManager-release.aar and /dev/null differ diff --git a/Android/APIExample/app/libs/effectAAR-release.aar b/Android/APIExample/app/libs/effectAAR-release.aar deleted file mode 100644 index aad248636..000000000 Binary files a/Android/APIExample/app/libs/effectAAR-release.aar and /dev/null differ diff --git a/Android/APIExample/app/src/main/AndroidManifest.xml b/Android/APIExample/app/src/main/AndroidManifest.xml index 35df3018e..0360c65c0 100644 --- a/Android/APIExample/app/src/main/AndroidManifest.xml +++ b/Android/APIExample/app/src/main/AndroidManifest.xml @@ -15,6 +15,7 @@ + @@ -34,10 +35,11 @@ diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java index 3dde5fc35..68bddbbf3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java @@ -36,7 +36,7 @@ public void handleOnBackPressed() { public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); handler = new Handler(Looper.getMainLooper()); - requireActivity().getOnBackPressedDispatcher().addCallback(onBackPressedCallback); + requireActivity().getOnBackPressedDispatcher().addCallback(this, onBackPressedCallback); } @Override diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java index 2e555a3d1..7f71b021f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java @@ -9,6 +9,7 @@ import android.content.Context; import android.os.Bundle; +import android.util.Base64; import android.util.Log; import android.view.LayoutInflater; import android.view.SurfaceView; @@ -28,7 +29,6 @@ import com.yanzhenjie.permission.runtime.Permission; import java.lang.reflect.Method; -import java.nio.charset.StandardCharsets; import io.agora.api.example.MainApplication; import io.agora.api.example.R; @@ -255,7 +255,10 @@ private void enablePacketProcessor(boolean enable) { } private byte[] getKdfSaltFromServer() { - return "EncryptionKdfSaltInBase64Strings".getBytes(StandardCharsets.UTF_8); + // Salt string should be the output of the following command: + // openssl rand -base64 32 + String saltBase64String = "NiIeJ08AbtcQVjvV+oOEvF/4Dz5dy1CIwa805C8J2w0="; + return Base64.decode(saltBase64String, Base64.DEFAULT); } private void joinChannel(String channelId) { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java index 
13edffa2e..fded9f26b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java @@ -568,7 +568,7 @@ public void onDrawFrame(GL10 gl) { renderMatrix.preRotate(lastI420Frame.getRotation()); renderMatrix.preTranslate(-0.5f, -0.5f); try { - drawer.drawYuv(yuvUploader.getYuvTextures(), + drawer.drawYuv(yuvUploader.getYuvTextures(), 0, RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), lastI420Frame.getRotatedWidth(), lastI420Frame.getRotatedHeight(), 0, 0, viewportWidth, viewportHeight); } catch (NullPointerException exception) { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/FaceCapture.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/FaceCapture.java index efb55f1ac..842e11d39 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/FaceCapture.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/FaceCapture.java @@ -35,6 +35,7 @@ import io.agora.rtc2.RtcEngine; import io.agora.rtc2.RtcEngineConfig; import io.agora.rtc2.proxy.LocalAccessPointConfiguration; +import io.agora.rtc2.video.IFaceInfoObserver; import io.agora.rtc2.video.IVideoFrameObserver; import io.agora.rtc2.video.VideoCanvas; import io.agora.rtc2.video.VideoEncoderConfiguration; @@ -52,7 +53,7 @@ public class FaceCapture extends BaseFragment implements View.OnClickListener { private static final String TAG = FaceCapture.class.getSimpleName(); - private static final String AUTHENTICATION = ""; + private static final String AUTHENTICATION = "i1RXlMcDyB5F6Yn9aPijy1MouDxdsI4ajOj+NrHsc99FntK1drIlJNWdodNzBjAeH8cSUSuRkX6LwoE0WDgEAnivcXbsYv8atcZYZFl4FoXONgI6uIOmi+r5SGjsgZe+yI6wdh5eW1pLLh6m/W02M35EX1wtBvP3l79q3KeGsDQ="; private FrameLayout fl_local; private Button join; @@ -95,6 +96,7 @@ public void onCreate(@Nullable Bundle savedInstanceState) { config.mAreaCode = ((MainApplication) requireActivity().getApplication()).getGlobalSettings().getAreaCode(); /* For Android, the agora_face_capture_extension will not load default. You must add it manually. */ config.addExtension("agora_face_capture_extension"); + config.addExtension("agora_lip_sync_extension"); /* Config extension observer to receive the events. 
*/ config.mExtensionObserver = iMediaExtensionObserver; engine = RtcEngine.create(config); @@ -118,6 +120,7 @@ public void onCreate(@Nullable Bundle savedInstanceState) { } engine.registerVideoFrameObserver(iVideoFrameObserver); + engine.registerFaceInfoObserver(iFaceInfoObserver); engine.enableExtension("agora_video_filters_face_capture", "face_capture", true, PRIMARY_CAMERA_SOURCE); engine.setExtensionProperty("agora_video_filters_face_capture", "face_capture", @@ -127,6 +130,15 @@ public void onCreate(@Nullable Bundle savedInstanceState) { + AUTHENTICATION + "\"}" ); + + engine.enableExtension("agora_filters_lip_sync", "lip_sync", true, Constants.MediaSourceType.SPEECH_DRIVEN_VIDEO_SOURCE); + engine.setExtensionProperty("agora_filters_lip_sync","lip_sync", "parameters", + "{\"company_id\":\"agoraTest\"" + + ",\"license\":\"" + AUTHENTICATION + "\"" + + ",\"open_agc\":true" + + "}", + Constants.MediaSourceType.SPEECH_DRIVEN_VIDEO_SOURCE); + } catch (Exception e) { e.printStackTrace(); requireActivity().onBackPressed(); @@ -263,7 +275,7 @@ public boolean onRenderVideoFrame(String channelId, int uid, VideoFrame videoFra public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) { Log.i(TAG, String.format(Locale.US, "VideoFrameObserver >> onCaptureVideoFrame : metadata=%s", videoFrame.getMetaInfo().toString())); - runOnUIThread(() -> et_capture_info.setText(videoFrame.getMetaInfo().toString())); + // runOnUIThread(() -> et_capture_info.setText(videoFrame.getMetaInfo().toString())); return true; } @@ -319,6 +331,14 @@ public void onEvent(String provider, String extension, String key, String value) showShortToast("Face capture authentication information not set!"); showAlert(getString(R.string.face_capture_authentication), false); } + } else if ("agora_filters_lip_sync".equals(provider) + && "lip_sync".equals(extension) + && "status_code".equals(key)) { + if ("0".equals(value)) { + showShortToast("Speech driven authentication successful."); + } else { + showShortToast("Speech driven authentication failed. code=" + value); + } } } @@ -341,6 +361,14 @@ public void onError(String provider, String extension, int error, String message } }; + private final IFaceInfoObserver iFaceInfoObserver = new IFaceInfoObserver() { + @Override + public boolean onFaceInfo(String outFaceInfo) { + runOnUIThread(() -> et_capture_info.setText(outFaceInfo)); + return false; + } + }; + /** * IRtcEngineEventHandler is an abstract class providing default implementation. * The SDK uses this class to report to the app on SDK runtime events. 
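Review note: the FaceCapture.java changes above touch four spots — extension loading, observer registration, enabling, and property configuration. The following condensed sketch shows the call order the patch establishes. It reuses only names that appear in the diff (config, engine, AUTHENTICATION, iVideoFrameObserver, iFaceInfoObserver, PRIMARY_CAMERA_SOURCE) and is illustrative, not a drop-in snippet; the face-capture authentication property key is omitted because the hunk does not show it in full.

    // 1. On Android, both extensions must be added manually before RtcEngine.create().
    config.addExtension("agora_face_capture_extension");
    config.addExtension("agora_lip_sync_extension");
    RtcEngine engine = RtcEngine.create(config);

    // 2. Register observers: raw video frames, plus the new face-info callback.
    engine.registerVideoFrameObserver(iVideoFrameObserver);
    engine.registerFaceInfoObserver(iFaceInfoObserver); // face data now arrives in onFaceInfo(String)

    // 3. Enable face capture on the camera source (licensed via AUTHENTICATION as in the hunk above).
    engine.enableExtension("agora_video_filters_face_capture", "face_capture", true, PRIMARY_CAMERA_SOURCE);

    // 4. Enable and configure speech-driven lip sync on its dedicated media source.
    engine.enableExtension("agora_filters_lip_sync", "lip_sync", true,
            Constants.MediaSourceType.SPEECH_DRIVEN_VIDEO_SOURCE);
    engine.setExtensionProperty("agora_filters_lip_sync", "lip_sync", "parameters",
            "{\"company_id\":\"agoraTest\",\"license\":\"" + AUTHENTICATION + "\",\"open_agc\":true}",
            Constants.MediaSourceType.SPEECH_DRIVEN_VIDEO_SOURCE);

Authentication results for both extensions come back asynchronously through onEvent with the "status_code" key, which is why the patch adds the toast branch for "agora_filters_lip_sync".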
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java
index 91a456ded..99dd98d78 100644
--- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java
+++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java
@@ -14,6 +14,7 @@
 import android.view.View;
 import android.view.ViewGroup;
 import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
 import android.widget.FrameLayout;
 import android.widget.SeekBar;
 import android.widget.Toast;
@@ -27,6 +28,7 @@
 import com.yanzhenjie.permission.runtime.Permission;
 
 import java.io.File;
+import java.util.ArrayList;
 import java.util.Locale;
 import java.util.Random;
@@ -49,6 +51,8 @@
 import io.agora.rtc2.RtcEngine;
 import io.agora.rtc2.RtcEngineConfig;
 import io.agora.rtc2.proxy.LocalAccessPointConfiguration;
+import io.agora.rtc2.video.AgoraFocalLengthInfo;
+import io.agora.rtc2.video.CameraCapturerConfiguration;
 import io.agora.rtc2.video.ImageTrackOptions;
 import io.agora.rtc2.video.VideoCanvas;
 import io.agora.rtc2.video.VideoEncoderConfiguration;
@@ -88,6 +92,7 @@ public class LiveStreaming extends BaseFragment implements View.OnClickListener
     private int canvasBgColor = 0x0000ffff; // RGBA
     private int canvasRenderMode = Constants.RENDER_MODE_HIDDEN;
     private final VideoEncoderConfiguration videoEncoderConfiguration = new VideoEncoderConfiguration();
+    private AgoraFocalLengthInfo[] agoraFocalLengthInfos;
 
     @Nullable
     @Override
@@ -106,6 +111,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat
         mRootBinding.btnPreload.setOnClickListener(this);
         mRootBinding.btnPublish.setOnClickListener(this);
         mRootBinding.btnRemoteScreenshot.setOnClickListener(this);
+        mRootBinding.btnSwitchCamera.setOnClickListener(this);
         foreGroundVideo.setOnClickListener(this);
 
         mSettingBinding = FragmentLiveStreamingSettingBinding.inflate(LayoutInflater.from(getContext()));
@@ -192,6 +198,25 @@ public void onStopTrackingTouch(SeekBar seekBar) {
                     15
             ));
         });
+        mSettingBinding.spCamera.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+            @Override
+            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
+                AgoraFocalLengthInfo info = agoraFocalLengthInfos[position];
+                CameraCapturerConfiguration config = new CameraCapturerConfiguration(
+                        getCameraDirection(info),
+                        getFocalLengthType(info)
+                );
+                int ret = engine.setCameraCapturerConfiguration(
+                        config
+                );
+                Log.d(TAG, "setCameraCapturerConfiguration ret=" + ret);
+            }
+
+            @Override
+            public void onNothingSelected(AdapterView<?> parent) {
+
+            }
+        });
         mSettingDialog = new BottomSheetDialog(requireContext());
         mSettingDialog.setContentView(mSettingBinding.getRoot());
     }
@@ -274,12 +299,89 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
             engine.setVideoEncoderConfiguration(videoEncoderConfiguration);
 
             engine.enableDualStreamMode(true);
+
+            agoraFocalLengthInfos = engine.queryCameraFocalLengthCapability();
+            ArrayList<String> strings = new ArrayList<>();
+            for (int i = 0; i < agoraFocalLengthInfos.length; i++) {
+                AgoraFocalLengthInfo info = agoraFocalLengthInfos[i];
+
+                String cameraDirection = getString(R.string.camera_front);
+                if (getCameraDirection(info) == CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_REAR) {
+                    cameraDirection = getString(R.string.camera_rear);
+                } else if (getCameraDirection(info) ==
CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_EXTRA) { + cameraDirection = getString(R.string.camera_extral); + } + String focalLength = getString(R.string.camera_focal_default); + if (getFocalLengthType(info) == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_WIDE_ANGLE) { + focalLength = getString(R.string.camera_focal_wide_angle); + } else if (getFocalLengthType(info) == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_ULTRA_WIDE) { + focalLength = getString(R.string.camera_focal_urltra_wide); + } else if (getFocalLengthType(info) == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_TELEPHOTO) { + focalLength = getString(R.string.camera_focal_telephoto); + } + + strings.add(String.format(Locale.US, "[%s] %s", cameraDirection, focalLength)); + } + mSettingBinding.spCamera.setAdapter(new ArrayAdapter(requireContext(), android.R.layout.simple_spinner_dropdown_item, strings)); } catch (Exception e) { requireActivity().onBackPressed(); e.printStackTrace(); } } + /** + * Get the camera direction from the AgoraFocalLengthInfo + * + * @param info AgoraFocalLengthInfo + * @return Camera direction + */ + private static CameraCapturerConfiguration.CAMERA_DIRECTION getCameraDirection(AgoraFocalLengthInfo info) { + try { + String string = info.toString(); + String[] split = string.split("cameraDirection"); + String substring = split[1].substring(1, 2); + int cameraDirection = Integer.parseInt(substring); + if (cameraDirection == CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_FRONT.getValue()) { + return CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_FRONT; + } else if (cameraDirection == CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_REAR.getValue()) { + return CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_REAR; + } else if (cameraDirection == CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_EXTRA.getValue()) { + return CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_EXTRA; + } + } catch (Exception e) { + Log.e(TAG, "getCameraDirection error=" + e.getMessage()); + } + return CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_FRONT; + } + + /** + * Get the focal length type from the AgoraFocalLengthInfo + * + * @param info AgoraFocalLengthInfo + * @return Focal length type + */ + private static CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE getFocalLengthType(AgoraFocalLengthInfo info) { + try { + String string = info.toString(); + String[] split = string.split("focalLengthType"); + String substring = split[1].substring(1, 2); + int focalLength = Integer.parseInt(substring); + if (focalLength == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_DEFAULT.getValue()) { + return CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_DEFAULT; + } else if (focalLength == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_WIDE_ANGLE.getValue()) { + return CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_WIDE_ANGLE; + } else if (focalLength == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_ULTRA_WIDE.getValue()) { + return CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_ULTRA_WIDE; + } else if (focalLength == CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_TELEPHOTO.getValue()) { + return CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_TELEPHOTO; + } + } catch (Exception e) { + Log.e(TAG, "getFocalLengthType 
error=" + e.getMessage()); + } + return CameraCapturerConfiguration.CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_DEFAULT; + } + + @Override public void onDestroy() { super.onDestroy(); @@ -425,6 +527,8 @@ public void onClick(View v) { } }); } + } else if (v.getId() == R.id.btn_switch_camera) { + engine.switchCamera(); } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoMetadata.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java similarity index 90% rename from Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoMetadata.java rename to Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java index ef2ff8b4a..1102412ae 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoMetadata.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java @@ -9,6 +9,7 @@ import android.content.Context; import android.os.Bundle; +import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.SurfaceView; @@ -26,6 +27,7 @@ import com.yanzhenjie.permission.runtime.Permission; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import io.agora.api.example.MainApplication; import io.agora.api.example.R; @@ -48,18 +50,18 @@ @Example( index = 3, group = ADVANCED, - name = R.string.item_videometadata, - actionId = R.id.action_mainFragment_to_VideoMetadata, - tipsId = R.string.videometadata + name = R.string.item_mediametadata, + actionId = R.id.action_mainFragment_to_MediaMetadata, + tipsId = R.string.mediametadata ) -public class VideoMetadata extends BaseFragment implements View.OnClickListener { +public class MediaMetadata extends BaseFragment implements View.OnClickListener { /** * The constant TAG. 
*/ - public static final String TAG = VideoMetadata.class.getSimpleName(); + public static final String TAG = MediaMetadata.class.getSimpleName(); private FrameLayout fl_local, fl_remote; - private Button send, join; - private EditText et_channel; + private Button join, btn_send_video_metadata, btn_send_audio_metadata; + private EditText et_channel, et_video_metadata, et_audio_metadata; private RtcEngine engine; private int myUid; private boolean joined = false; @@ -70,24 +72,29 @@ public class VideoMetadata extends BaseFragment implements View.OnClickListener /** * Meta data to be sent */ - private byte[] metadata; + private byte[] videoMetadata; @Nullable @Override public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { - View view = inflater.inflate(R.layout.fragment_video_metadata, container, false); + View view = inflater.inflate(R.layout.fragment_media_metadata, container, false); return view; } @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); - send = view.findViewById(R.id.btn_send); - send.setOnClickListener(this); - send.setEnabled(false); join = view.findViewById(R.id.btn_join); et_channel = view.findViewById(R.id.et_channel); - view.findViewById(R.id.btn_join).setOnClickListener(this); + et_audio_metadata= view.findViewById(R.id.et_audio_metadata); + et_video_metadata= view.findViewById(R.id.et_video_metadata); + btn_send_audio_metadata = view.findViewById(R.id.btn_send_audio_metadata); + btn_send_video_metadata = view.findViewById(R.id.btn_send_video_metadata); + join.setOnClickListener(this); + btn_send_audio_metadata.setOnClickListener(this); + btn_send_video_metadata.setOnClickListener(this); + btn_send_audio_metadata.setEnabled(false); + btn_send_video_metadata.setEnabled(false); fl_local = view.findViewById(R.id.fl_local); fl_remote = view.findViewById(R.id.fl_remote); } @@ -143,6 +150,8 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { // This api can only be used in the private media server scenario, otherwise some problems may occur. engine.setLocalAccessPoint(localAccessPointConfiguration); } + + engine.setParameters("{\"rtc.use_audio4\":true}"); } catch (Exception e) { e.printStackTrace(); @@ -202,14 +211,23 @@ public void onClick(View v) { * 2:If you call the leaveChannel method during CDN live streaming, the SDK * triggers the removeInjectStreamUrl method.*/ engine.leaveChannel(); - send.setEnabled(false); + btn_send_audio_metadata.setEnabled(false); + btn_send_video_metadata.setEnabled(false); join.setText(getString(R.string.join)); } - } else if (v.getId() == R.id.btn_send) { - /*Click once, the metadata is sent once. - * {@link VideoMetadata#iMetadataObserver}. - * The metadata here can be flexibly replaced according to your own business.*/ - metadata = String.valueOf(System.currentTimeMillis()).getBytes(Charset.forName("UTF-8")); + } else if(v.getId() == R.id.btn_send_video_metadata){ + String text = et_video_metadata.getText().toString(); + if(!TextUtils.isEmpty(text)){ + /*Click once, the metadata is sent once. + * {@link VideoMetadata#iMetadataObserver}. 
+ * The metadata here can be flexibly replaced according to your own business.*/ + videoMetadata = text.getBytes(StandardCharsets.UTF_8); + } + } else if(v.getId() == R.id.btn_send_audio_metadata){ + String text = et_audio_metadata.getText().toString(); + if(!TextUtils.isEmpty(text)){ + engine.sendAudioMetadata(text.getBytes(StandardCharsets.UTF_8)); + } } } @@ -287,13 +305,13 @@ public int getMaxMetadataSize() { @Override public byte[] onReadyToSendMetadata(long timeStampMs, int sourceType) { /*Check if the metadata is empty.*/ - if (metadata == null) { + if (videoMetadata == null) { return null; } Log.i(TAG, "There is metadata to send!"); /*Recycle metadata objects.*/ - byte[] toBeSend = metadata; - metadata = null; + byte[] toBeSend = videoMetadata; + videoMetadata = null; if (toBeSend.length > MAX_META_SIZE) { Log.e(TAG, String.format("Metadata exceeding max length %d!", MAX_META_SIZE)); return null; @@ -368,7 +386,8 @@ public void onJoinChannelSuccess(String channel, int uid, int elapsed) { handler.post(new Runnable() { @Override public void run() { - send.setEnabled(true); + btn_send_audio_metadata.setEnabled(true); + btn_send_video_metadata.setEnabled(true); join.setEnabled(true); join.setText(getString(R.string.leave)); } @@ -509,5 +528,12 @@ public void run() { } }); } + + @Override + public void onAudioMetadataReceived(int uid, byte[] data) { + super.onAudioMetadataReceived(uid, data); + Log.i(TAG, String.format("onAudioMetadataReceived uid=%d data=%s", uid, new String(data, StandardCharsets.UTF_8))); + showShortToast("Audio Metadata: uid=" + uid + ", data=" + new String(data, StandardCharsets.UTF_8)); + } }; } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java index 0ea3c8267..13821dd41 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java @@ -6,10 +6,14 @@ import android.Manifest; import android.annotation.SuppressLint; +import android.app.AppOpsManager; +import android.app.PictureInPictureParams; import android.content.Context; +import android.content.pm.PackageManager; import android.os.Build; import android.os.Bundle; import android.util.Log; +import android.util.Rational; import android.view.LayoutInflater; import android.view.SurfaceView; import android.view.TextureView; @@ -21,6 +25,8 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; +import androidx.annotation.RequiresApi; +import androidx.appcompat.app.AppCompatActivity; import androidx.fragment.app.FragmentActivity; import com.yanzhenjie.permission.AndPermission; @@ -54,14 +60,19 @@ public class PictureInPicture extends BaseFragment implements View.OnClickListener { private static final String TAG = PictureInPicture.class.getSimpleName(); - private FrameLayout remoteContainer; - private VideoReportLayout fl_local, fl_remote; - private Button join, switch_float_window; + private VideoReportLayout fl_local, fl_remote, fl_remote2, fl_remote3; + private Button join, switch_float_window, btn_pip; private EditText et_channel; private RtcEngine engine; private int myUid; private boolean joined = false; private AVCallFloatView floatWindowView; + private ViewGroup video_layout_container; + private ViewGroup video_layout; + private ViewGroup ll_join; + + @RequiresApi(26) + private 
PictureInPictureParams.Builder pictureInPictureParamsBuilder = new PictureInPictureParams.Builder(); @Nullable @Override @@ -74,13 +85,19 @@ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup c public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); join = view.findViewById(R.id.btn_join); - remoteContainer = view.findViewById(R.id.fl_remote_container); switch_float_window = view.findViewById(R.id.btn_float_window); et_channel = view.findViewById(R.id.et_channel); view.findViewById(R.id.btn_join).setOnClickListener(this); + view.findViewById(R.id.btn_pip).setOnClickListener(this); switch_float_window.setOnClickListener(this); fl_local = view.findViewById(R.id.fl_local); fl_remote = view.findViewById(R.id.fl_remote); + fl_remote2 = view.findViewById(R.id.fl_remote2); + fl_remote3 = view.findViewById(R.id.fl_remote3); + video_layout_container = view.findViewById(R.id.video_layout_container); + video_layout = view.findViewById(R.id.video_layout); + ll_join = view.findViewById(R.id.ll_join); + btn_pip = view.findViewById(R.id.btn_pip); } @Override @@ -205,9 +222,23 @@ public void onClick(View v) { join.setText(getString(R.string.join)); fl_remote.setReportUid(-1); fl_remote.removeAllViews(); + fl_remote2.setReportUid(-1); + fl_remote2.removeAllViews(); + fl_remote3.setReportUid(-1); + fl_remote3.removeAllViews(); } } else if (v.getId() == switch_float_window.getId()) { showFloatWindow(); + } else if (v.getId() == R.id.btn_pip) { + if (checkPipSupported()) { + if (checkPipEnabled()) { + enterPip(); + } else { + showLongToast("Please enable Picture-in-Picture mode in the system settings"); + } + } else { + showLongToast("The device does not support Picture-in-Picture mode"); + } } } @@ -414,17 +445,18 @@ public void onUserJoined(int uid, int elapsed) { if (context == null) { return; } - if (fl_remote.getReportUid() > 0) { - return; - } - handler.post(() -> { + runOnUIThread(() -> { + VideoReportLayout view = getAvailableView(); + if (view == null) { + return; + } /*Display remote video stream*/ TextureView surfaceView = null; // Create render view by RtcEngine surfaceView = new TextureView(context); - fl_remote.setReportUid(uid); + view.setReportUid(uid); // Add to the remote container - fl_remote.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + view.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); // Setup remote video to render engine.setupRemoteVideo(new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, uid)); }); @@ -444,45 +476,44 @@ public void onUserJoined(int uid, int elapsed) { public void onUserOffline(int uid, int reason) { Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason)); showLongToast(String.format("user %d offline! 
reason:%d", uid, reason)); - handler.post(new Runnable() { - @Override - public void run() { - /*Clear render view - Note: The video will stay at its last frame, to completely remove it you will need to - remove the SurfaceView from its parent*/ - engine.setupRemoteVideo(new VideoCanvas(null, RENDER_MODE_HIDDEN, uid)); - if (fl_remote.getReportUid() == uid) { - fl_remote.setReportUid(-1); - fl_remote.removeAllViews(); - } + runOnUIThread(() -> { + /*Clear render view + Note: The video will stay at its last frame, to completely remove it you will need to + remove the SurfaceView from its parent*/ + engine.setupRemoteVideo(new VideoCanvas(null, RENDER_MODE_HIDDEN, uid)); + VideoReportLayout remoteView = getRemoteView(uid); + if (remoteView != null) { + remoteView.setReportUid(-1); + remoteView.removeAllViews(); } }); } - @Override - public void onLocalAudioStats(LocalAudioStats stats) { - super.onLocalAudioStats(stats); - fl_local.setLocalAudioStats(stats); - } - - @Override - public void onRemoteAudioStats(RemoteAudioStats stats) { - super.onRemoteAudioStats(stats); - fl_remote.setRemoteAudioStats(stats); - } + }; - @Override - public void onLocalVideoStats(Constants.VideoSourceType source, LocalVideoStats stats) { - super.onLocalVideoStats(source, stats); - fl_local.setLocalVideoStats(stats); + private VideoReportLayout getAvailableView() { + if (fl_remote.getChildCount() == 0) { + return fl_remote; + } else if (fl_remote2.getChildCount() == 0) { + return fl_remote2; + } else if (fl_remote3.getChildCount() == 0) { + return fl_remote3; + } else { + return null; } + } - @Override - public void onRemoteVideoStats(RemoteVideoStats stats) { - super.onRemoteVideoStats(stats); - fl_remote.setRemoteVideoStats(stats); + private VideoReportLayout getRemoteView(int uid) { + if (fl_remote.getReportUid() == uid) { + return fl_remote; + } else if (fl_remote2.getReportUid() == uid) { + return fl_remote2; + } else if (fl_remote3.getReportUid() == uid) { + return fl_remote3; + } else { + return null; } - }; + } private void showFloatWindow() { FragmentActivity context = requireActivity(); @@ -499,8 +530,9 @@ private void showFloatWindow() { dismissFloatWindow(); }); FrameLayout container = floatView.findViewById(R.id.fl_container); - remoteContainer.removeView(fl_remote); - container.addView(fl_remote); + + video_layout_container.removeView(video_layout); + container.addView(video_layout); } else { FloatWindowHelper.applyPermission(context); @@ -512,9 +544,10 @@ private void dismissFloatWindow() { return; } FrameLayout container = floatWindowView.findViewById(R.id.fl_container); + if (container.getChildCount() > 0) { - container.removeView(fl_remote); - remoteContainer.addView(fl_remote); + container.removeView(video_layout); + video_layout_container.addView(video_layout); } FloatWindowHelper.destroyFloatView(floatWindowView); @@ -526,4 +559,47 @@ private boolean isFloatWindowShowing() { } + private boolean checkPipSupported(){ + if(Build.VERSION.SDK_INT < 26){ + return false; + } + return requireActivity().getPackageManager().hasSystemFeature(PackageManager.FEATURE_PICTURE_IN_PICTURE); + } + + private boolean checkPipEnabled() { + if (android.os.Build.VERSION.SDK_INT < 26) { + return false; + } + AppOpsManager appOpsManager = requireActivity().getSystemService(AppOpsManager.class); + return appOpsManager.checkOpNoThrow(AppOpsManager.OPSTR_PICTURE_IN_PICTURE, + android.os.Process.myUid(), + requireActivity().getPackageName()) + == AppOpsManager.MODE_ALLOWED; + } + + private void enterPip(){ + if 
(android.os.Build.VERSION.SDK_INT < 26) { + return; + } + requireActivity().enterPictureInPictureMode(pictureInPictureParamsBuilder + .setAspectRatio(new Rational(video_layout_container.getWidth(), video_layout_container.getHeight())) + .build()); + + ((AppCompatActivity)requireActivity()).getSupportActionBar().hide(); + ll_join.setVisibility(View.GONE); + btn_pip.setVisibility(View.GONE); + switch_float_window.setVisibility(View.GONE); + } + + @Override + public void onPictureInPictureModeChanged(boolean isInPictureInPictureMode) { + super.onPictureInPictureModeChanged(isInPictureInPictureMode); + if (!isInPictureInPictureMode) { + ((AppCompatActivity)requireActivity()).getSupportActionBar().show(); + ll_join.setVisibility(View.VISIBLE); + btn_pip.setVisibility(View.VISIBLE); + switch_float_window.setVisibility(View.VISIBLE); + } + } + } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java index 91d007ca6..3013553a8 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java @@ -345,10 +345,7 @@ public boolean onEarMonitoringAudioFrame(int type, int samplesPerChannel, int by } @Override - public boolean onPlaybackAudioFrameBeforeMixing(String channel, int uid, int audioFrameType, - int samples, int bytesPerSample, int channels, - int samplesPerSec, ByteBuffer byteBuffer, - long renderTimeMs, int bufferLength) { + public boolean onPlaybackAudioFrameBeforeMixing(String channelId, int uid, int type, int samplesPerChannel, int bytesPerSample, int channels, int samplesPerSec, ByteBuffer buffer, long renderTimeMs, int avsync_type, int rtpTimestamp) { return false; } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ThirdPartyBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ThirdPartyBeauty.java index 8e321bad9..966332a5c 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ThirdPartyBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ThirdPartyBeauty.java @@ -99,7 +99,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat @Override public void onDestroy() { super.onDestroy(); - SenseTimeBeautySDK.INSTANCE.release(); - FaceUnityBeautySDK.INSTANCE.release(); + SenseTimeBeautySDK.INSTANCE.unInitBeautySDK(); + FaceUnityBeautySDK.INSTANCE.unInitBeauty(); } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java index 42503bb14..2e05ce96b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java @@ -1,6 +1,5 @@ package io.agora.api.example.examples.advanced.beauty; -import android.graphics.Matrix; import android.os.Bundle; import android.view.LayoutInflater; import android.view.TextureView; @@ -11,8 +10,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.bytedance.labcv.effectsdk.RenderManager; - import java.io.IOException; 
import java.util.Locale; import java.util.Random; @@ -22,7 +19,6 @@ import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.databinding.FragmentBeautyBytedanceBinding; import io.agora.api.example.utils.TokenUtils; -import io.agora.beautyapi.bytedance.BeautyPreset; import io.agora.beautyapi.bytedance.ByteDanceBeautyAPI; import io.agora.beautyapi.bytedance.ByteDanceBeautyAPIKt; import io.agora.beautyapi.bytedance.CameraConfig; @@ -40,9 +36,6 @@ * The type Byte dance beauty. */ public class ByteDanceBeauty extends BaseFragment { - private static final String TAG = "SceneTimeBeauty"; - private static final Matrix IDENTITY_MATRIX = new Matrix(); - private static final String LICENSE_NAME = "agora_test_20220805_20230815_io.agora.test.entfull_4.2.3.licbag"; private FragmentBeautyBytedanceBinding mBinding; private RtcEngine rtcEngine; private String channelId; @@ -85,13 +78,20 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat rtcEngine.setColorEnhanceOptions(isChecked, options); }); - byteDanceBeautyAPI.initialize(new Config(requireContext(), rtcEngine, ByteDanceBeautySDK.INSTANCE.getRenderManager(), new EventCallback(beautyStats -> null, () -> { - ByteDanceBeautySDK.INSTANCE.initEffect(requireContext()); - return null; - }, () -> { - ByteDanceBeautySDK.INSTANCE.unInitEffect(); - return null; - }), CaptureMode.Agora, 0, false, new CameraConfig())); + byteDanceBeautyAPI.initialize(new Config(requireContext(), rtcEngine, + ByteDanceBeautySDK.INSTANCE.getRenderManager(), + new EventCallback(beautyStats -> null, + () -> { + ByteDanceBeautySDK.INSTANCE.initEffect(requireContext()); + return null; + }, + () -> { + ByteDanceBeautySDK.INSTANCE.unInitEffect(); + return null; + }), + CaptureMode.Agora, + 0, + false, new CameraConfig())); byteDanceBeautyAPI.enable(true); } @@ -114,23 +114,28 @@ protected void onBackPressed() { private void initVideoView() { mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> { - byteDanceBeautyAPI.setBeautyPreset(isChecked ? BeautyPreset.DEFAULT : BeautyPreset.CUSTOM, - ByteDanceBeautySDK.INSTANCE.getBeautyNodePath(), - ByteDanceBeautySDK.INSTANCE.getBeauty4ItemsNodePath(), - ByteDanceBeautySDK.INSTANCE.getReSharpNodePath()); + ByteDanceBeautySDK.INSTANCE.getBeautyConfig().setWhiten( + isChecked ? 1.0f : 0.0f + ); }); mBinding.cbMakeup.setOnCheckedChangeListener((buttonView, isChecked) -> { - RenderManager renderManager = ByteDanceBeautySDK.INSTANCE.getRenderManager(); - renderManager.appendComposerNodes(new String[]{ByteDanceBeautySDK.INSTANCE.getMakeupTianmeiNodePath()}); - renderManager.updateComposerNodes(ByteDanceBeautySDK.INSTANCE.getMakeupTianmeiNodePath(), "Filter_ALL", isChecked ? 0.5f : 0.f); - renderManager.updateComposerNodes(ByteDanceBeautySDK.INSTANCE.getMakeupTianmeiNodePath(), "Makeup_ALL", isChecked ? 
0.5f : 0f); + if (isChecked) { + ByteDanceBeautySDK.INSTANCE.getBeautyConfig().setMakeUp( + new ByteDanceBeautySDK.MakeUpItem( + requireContext(), + "yuanqi", + 1.0f + ) + ); + } else { + ByteDanceBeautySDK.INSTANCE.getBeautyConfig().setMakeUp(null); + } }); mBinding.cbSticker.setOnCheckedChangeListener((buttonView, isChecked) -> { - RenderManager renderManager = ByteDanceBeautySDK.INSTANCE.getRenderManager(); if (isChecked) { - renderManager.setSticker(ByteDanceBeautySDK.INSTANCE.getStickerPath() + "/wochaotian"); + ByteDanceBeautySDK.INSTANCE.getBeautyConfig().setSticker("zhaocaimao"); } else { - renderManager.setSticker(null); + ByteDanceBeautySDK.INSTANCE.getBeautyConfig().setSticker(null); } }); mBinding.ivCamera.setOnClickListener(v -> { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt index d8eb4b274..05c9015be 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt @@ -1,157 +1,134 @@ package io.agora.api.example.examples.advanced.beauty -import android.app.Application import android.content.Context import android.util.Log -import com.bytedance.labcv.effectsdk.RenderManager -import io.agora.api.example.utils.FileKtUtils -import java.util.concurrent.Executors - -/** - * Byte dance beauty s d k - * - * @constructor Create empty Byte dance beauty s d k - */ -object ByteDanceBeautySDK { - private val TAG = "ByteDanceBeautySDK" - - private val LICENSE_NAME = "Agora_test_20231116_20240116_io.agora.test.entfull_4.5.0_893.licbag" - private val workerThread = Executors.newSingleThreadExecutor() - private var context: Application? = null - private var storagePath = "" - private var assetsPath = "" +import com.effectsar.labcv.effectsdk.RenderManager +import io.agora.api.example.utils.FileUtils +import io.agora.beautyapi.bytedance.ByteDanceBeautyAPI +import java.io.File - /** - * Render manager - */ - val renderManager = RenderManager() - - /** - * License path - */ - var licensePath = "" +object ByteDanceBeautySDK { - /** - * Models path - */ - var modelsPath = "" + private const val TAG = "ByteDanceBeautySDK" - /** - * Beauty node path - */ + private val LICENSE_NAME = "Agora_test_20240111_20240411_io.agora.test.entfull_4.5.0_1111.licbag" + private var storagePath = "" + private var assetsPath = "" + private var licensePath = "" + private var modelsPath = "" var beautyNodePath = "" - - /** - * Beauty4items node path - */ var beauty4ItemsNodePath = "" - - /** - * Re sharp node path - */ var reSharpNodePath = "" - - /** - * Sticker path - */ var stickerPath = "" + private val nodesLoaded = mutableListOf() - /** - * Makeup tianmei node path - */ - var makeupTianmeiNodePath = "" - get() { - if(field.isEmpty()){ - // copy makeup node - field = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/tianmei" - FileKtUtils.copyAssets(context!!, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/style_makeup/tianmei", field) - renderManager.appendComposerNodes(arrayOf(field)) - renderManager.loadResourceWithTimeout(-1) - } - return field - } + private var beautyAPI: ByteDanceBeautyAPI? 
= null
 
-    /**
-     * Makeup yuan qi node path
-     */
-    var makeupYuanQiNodePath = ""
-        get() {
-            if(field.isEmpty()){
-                // copy makeup node
-                field = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/yuanqi"
-                FileKtUtils.copyAssets(context!!, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/style_makeup/yuanqi", field)
-                renderManager.appendComposerNodes(arrayOf(field))
-                renderManager.loadResourceWithTimeout(-1)
-            }
-            return field
-        }
+    // Effect handle
+    val renderManager = RenderManager()
+
+    // Beauty configuration
+    val beautyConfig = BeautyConfig()
 
-    /**
-     * Init beauty s d k
-     *
-     * @param context
-     */
-    fun initBeautySDK(context: Context){
-        this.context = context.applicationContext as? Application
-        storagePath = context.getExternalFilesDir("")?.absolutePath ?: return
+
+    fun initBeautySDK(context: Context): Boolean {
+        storagePath = context.getExternalFilesDir("")?.absolutePath ?: return false
         assetsPath = "beauty_bytedance"
-        workerThread.execute {
-            // copy license
-            licensePath = "$storagePath/beauty_bytedance/LicenseBag.bundle/$LICENSE_NAME"
-            FileKtUtils.copyAssets(context, "$assetsPath/LicenseBag.bundle/$LICENSE_NAME", licensePath)
+        // copy license
+        licensePath = "$storagePath/beauty_bytedance/LicenseBag.bundle"
+        FileUtils.copyFilesFromAssets(context, "$assetsPath/LicenseBag.bundle", licensePath)
+        licensePath += "/$LICENSE_NAME"
+        if (!File(licensePath).exists()) {
+            return false
+        }
 
-            // copy models
-            modelsPath = "$storagePath/beauty_bytedance/ModelResource.bundle"
-            FileKtUtils.copyAssets(context, "$assetsPath/ModelResource.bundle", modelsPath)
+        // copy models
+        modelsPath = "$storagePath/beauty_bytedance/ModelResource.bundle"
+        FileUtils.copyFilesFromAssets(context, "$assetsPath/ModelResource.bundle", modelsPath)
 
-            // copy beauty node
-            beautyNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite"
-            FileKtUtils.copyAssets(context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite", beautyNodePath)
+        // copy beauty node
+        beautyNodePath =
+            "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite"
+        FileUtils.copyFilesFromAssets(
+            context,
+            "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite",
+            beautyNodePath
+        )
 
-            // copy beauty 4items node
-            beauty4ItemsNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items"
-            FileKtUtils.copyAssets(context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items", beauty4ItemsNodePath)
+        // copy beauty 4items node
+        beauty4ItemsNodePath =
+            "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items"
+        FileUtils.copyFilesFromAssets(
+            context,
+            "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items",
+            beauty4ItemsNodePath
+        )
 
-            // copy resharp node
-            reSharpNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/reshape_lite"
-            FileKtUtils.copyAssets(context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/reshape_lite", reSharpNodePath)
+        // copy resharp node
+        reSharpNodePath =
+            "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/reshape_lite"
+        FileUtils.copyFilesFromAssets(
+            context,
+            "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/reshape_lite",
+            reSharpNodePath
+        )
+        // copy stickers
+        stickerPath = "$storagePath/beauty_bytedance/StickerResource.bundle/stickers"
+        FileUtils.copyFilesFromAssets(context, "$assetsPath/StickerResource.bundle/stickers", stickerPath)
 
-            // copy stickers
-            stickerPath =
"$storagePath/beauty_bytedance/StickerResource.bundle/stickers" - FileKtUtils.copyAssets(context, "$assetsPath/StickerResource.bundle/stickers", stickerPath) - } + return true } - /** - * Init effect - * - * @param context - */// GL Thread - fun initEffect(context: Context){ + // GL Thread + fun initEffect(context: Context) { val ret = renderManager.init( context, modelsPath, licensePath, false, false, 0 ) - if(!checkResult("RenderManager init ", ret)){ + if (!checkResult("RenderManager init ", ret)) { return } renderManager.useBuiltinSensor(true) renderManager.set3Buffer(false) - renderManager.appendComposerNodes(arrayOf(beautyNodePath, beauty4ItemsNodePath, reSharpNodePath)) + nodesLoaded.add(beautyNodePath) + renderManager.appendComposerNodes( + nodesLoaded.toTypedArray() + ) renderManager.loadResourceWithTimeout(-1) + beautyConfig.resume() } - /** - * Un init effect - * - */// GL Thread - fun unInitEffect(){ + // GL Thread + fun unInitEffect() { + beautyAPI = null + nodesLoaded.clear() + beautyConfig.reset() renderManager.release() } + private fun mayLoadBeauty4ItemsNode() { + if (!nodesLoaded.contains(beauty4ItemsNodePath)) { + nodesLoaded.add(beauty4ItemsNodePath) + renderManager.appendComposerNodes( + arrayOf(beauty4ItemsNodePath) + ) + renderManager.loadResourceWithTimeout(-1) + } + } + + private fun mayLoadReSharpNode() { + if (!nodesLoaded.contains(reSharpNodePath)) { + nodesLoaded.add(reSharpNodePath) + renderManager.appendComposerNodes( + arrayOf(reSharpNodePath) + ) + renderManager.loadResourceWithTimeout(-1) + } + } + private fun checkResult(msg: String, ret: Int): Boolean { if (ret != 0 && ret != -11 && ret != 1) { val log = "$msg error: $ret" @@ -161,88 +138,376 @@ object ByteDanceBeautySDK { return true } - /** - * Set beauty - * - * @param smooth - * @param whiten - * @param thinFace - * @param enlargeEye - * @param redden - * @param shrinkCheekbone - * @param shrinkJawbone - * @param whiteTeeth - * @param hairlineHeight - * @param narrowNose - * @param mouthSize - * @param chinLength - * @param brightEye - * @param darkCircles - * @param nasolabialFolds - */ - fun setBeauty( - smooth: Float? = null, - whiten: Float? = null, - thinFace: Float? = null, - enlargeEye: Float? = null, - redden: Float? = null, - shrinkCheekbone: Float? = null, - shrinkJawbone: Float? = null, - whiteTeeth: Float? = null, - hairlineHeight: Float? = null, - narrowNose: Float? = null, - mouthSize: Float? = null, - chinLength: Float? = null, - brightEye: Float? = null, - darkCircles: Float? = null, - nasolabialFolds: Float? = null - ){ + internal fun setBeautyAPI(beautyAPI: ByteDanceBeautyAPI?) 
{ + this.beautyAPI = beautyAPI + } + + private fun runOnBeautyThread(run: () -> Unit) { + beautyAPI?.runOnProcessThread(run) ?: run.invoke() + } + + + class BeautyConfig { + // 磨皮 - smooth?.let { renderManager.updateComposerNodes(beautyNodePath, "smooth", it) } + var smooth = 0.65f + set(value) { + field = value + runOnBeautyThread { + renderManager.updateComposerNodes(beautyNodePath, "smooth", value) + } + } // 美白 - whiten?.let { renderManager.updateComposerNodes(beautyNodePath, "whiten", it) } + var whiten = 0.5f + set(value) { + field = value + runOnBeautyThread { + renderManager.updateComposerNodes(beautyNodePath, "whiten", value) + } + } // 红润 - redden?.let { renderManager.updateComposerNodes(beautyNodePath, "sharp", it) } - + var redden = 0.0f + set(value) { + field = value + runOnBeautyThread { + renderManager.updateComposerNodes(beautyNodePath, "sharp", value) + } + } // 瘦脸 - thinFace?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Overall", it) } + var thinFace = 0.3f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Overall", + value + ) + } + } // 大眼 - enlargeEye?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Eye", it) } - + var enlargeEye = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Eye", value) + } + } // 瘦颧骨 - shrinkCheekbone?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Zoom_Cheekbone", it) } + var shrinkCheekbone = 0.3f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Zoom_Cheekbone", + value + ) + } + } // 下颌骨 - shrinkJawbone?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Zoom_Jawbone", it) } + var shrinkJawbone = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Zoom_Jawbone", + value + ) + } + } // 美牙 - whiteTeeth?.let { renderManager.updateComposerNodes(reSharpNodePath, "BEF_BEAUTY_WHITEN_TEETH", it) } + var whiteTeeth = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "BEF_BEAUTY_WHITEN_TEETH", + value + ) + } + } // 额头 - hairlineHeight?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Forehead", it) } + var hairlineHeight = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Forehead", + value + ) + } + } // 瘦鼻 - narrowNose?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Nose", it) } + var narrowNose = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Nose", + value + ) + } + } // 嘴形 - mouthSize?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_ZoomMouth", it) } + var mouthSize = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + 
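Every BeautyConfig setter funnels its side effect through runOnBeautyThread, which posts to the attached BeautyAPI's process thread and falls back to inline execution before the API exists. A stripped-down sketch of the pattern (the commented line marks where the real updateComposerNodes call sits):

```kotlin
import io.agora.beautyapi.bytedance.ByteDanceBeautyAPI

// Cache the value synchronously, mutate GL-side state on the process thread.
class ThreadedConfig(private var api: ByteDanceBeautyAPI? = null) {
    private fun runOnBeautyThread(run: () -> Unit) {
        api?.runOnProcessThread(run) ?: run.invoke()
    }

    var smooth = 0.65f
        set(value) {
            field = value               // readable immediately from any thread
            runOnBeautyThread {
                // renderManager.updateComposerNodes(beautyNodePath, "smooth", value)
            }
        }
}
```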
reSharpNodePath, + "Internal_Deform_ZoomMouth", + value + ) + } + } // 下巴 - chinLength?.let { renderManager.updateComposerNodes(reSharpNodePath, "Internal_Deform_Chin", it) } + var chinLength = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadReSharpNode() + } + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Chin", + value + ) + } + } // 亮眼 - brightEye?.let { renderManager.updateComposerNodes(beauty4ItemsNodePath, "BEF_BEAUTY_BRIGHTEN_EYE", it) } + var brightEye = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadBeauty4ItemsNode() + } + renderManager.updateComposerNodes( + beauty4ItemsNodePath, + "BEF_BEAUTY_BRIGHTEN_EYE", + value + ) + } + } // 祛黑眼圈 - darkCircles?.let { renderManager.updateComposerNodes(beauty4ItemsNodePath, "BEF_BEAUTY_REMOVE_POUCH", it) } + var darkCircles = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadBeauty4ItemsNode() + } + renderManager.updateComposerNodes( + beauty4ItemsNodePath, + "BEF_BEAUTY_REMOVE_POUCH", + value + ) + } + } // 祛法令纹 - nasolabialFolds?.let { renderManager.updateComposerNodes(beauty4ItemsNodePath, "BEF_BEAUTY_SMILES_FOLDS", it) } + var nasolabialFolds = 0.0f + set(value) { + field = value + runOnBeautyThread { + if (value > 0) { + mayLoadBeauty4ItemsNode() + } + renderManager.updateComposerNodes( + beauty4ItemsNodePath, + "BEF_BEAUTY_SMILES_FOLDS", + value + ) + } + } + + // 锐化 + var sharpen = 0.0f + set(value) { + field = value + runOnBeautyThread { + renderManager.updateComposerNodes( + beautyNodePath, + "sharp", + value + ) + } + } + // 清晰度 + var clear = 0.0f + set(value) { + field = value + runOnBeautyThread { + renderManager.updateComposerNodes( + beautyNodePath, + "clear", + value + ) + } + } + + + // 美妆 + var makeUp: MakeUpItem? = null + set(value) { + if (field == value) { + return + } + val oMakeUp = field + field = value + if (oMakeUp?.style != value?.style) { + if (oMakeUp != null) { + runOnBeautyThread { + val oNodePath = + "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${oMakeUp.style}" + renderManager.removeComposerNodes(arrayOf(oNodePath)) + } + } + + if (value != null) { + val nodePath = + "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}" + FileUtils.copyFilesFromAssets( + value.context, + "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}", + nodePath + ) + runOnBeautyThread { + renderManager.appendComposerNodes(arrayOf(nodePath)) + renderManager.loadResourceWithTimeout(-1) + } + } + } + + if (value != null) { + val nodePath = + "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}" + runOnBeautyThread { + renderManager.updateComposerNodes( + nodePath, + "Filter_ALL", + value.identity + ) + renderManager.updateComposerNodes( + nodePath, + "Makeup_ALL", + value.identity + ) + } + } + } + + + // 贴纸 + var sticker: String? 
= null + set(value) { + if (field == value) { + return + } + field = value + runOnBeautyThread { + if (value != null) { + renderManager.setSticker("$stickerPath/$value") + } else { + renderManager.setSticker(null) + } + } + } + + internal fun reset() { + smooth = 0.65f + whiten = 0.5f + thinFace = 0.3f + enlargeEye = 0.0f + redden = 0.0f + shrinkCheekbone = 0.3f + shrinkJawbone = 0.0f + whiteTeeth = 0.0f + hairlineHeight = 0.0f + narrowNose = 0.0f + mouthSize = 0.0f + chinLength = 0.0f + brightEye = 0.0f + darkCircles = 0.0f + nasolabialFolds = 0.0f + sharpen = 0.0f + clear = 0.0f + + makeUp = null + sticker = null + } + + internal fun resume() { + smooth = smooth + whiten = whiten + thinFace = thinFace + enlargeEye = enlargeEye + redden = redden + shrinkCheekbone = shrinkCheekbone + shrinkJawbone = shrinkJawbone + whiteTeeth = whiteTeeth + hairlineHeight = hairlineHeight + narrowNose = narrowNose + mouthSize = mouthSize + chinLength = chinLength + brightEye = brightEye + darkCircles = darkCircles + nasolabialFolds = nasolabialFolds + sharpen = sharpen + clear = clear + + makeUp = makeUp + sticker = sticker + } } + + data class MakeUpItem( + val context: Context, + val style: String, + val identity: Float + ) } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java index f88110c4d..64ffd435a 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java @@ -12,14 +12,8 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.faceunity.core.entity.FUBundleData; import com.faceunity.core.faceunity.FURenderKit; -import com.faceunity.core.model.bodyBeauty.BodyBeauty; -import com.faceunity.core.model.makeup.SimpleMakeup; -import com.faceunity.core.model.prop.Prop; -import com.faceunity.core.model.prop.sticker.Sticker; -import java.io.File; import java.lang.reflect.Method; import java.util.Locale; import java.util.Random; @@ -30,7 +24,6 @@ import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.databinding.FragmentBeautyFaceunityBinding; import io.agora.api.example.utils.TokenUtils; -import io.agora.beautyapi.faceunity.BeautyPreset; import io.agora.beautyapi.faceunity.CameraConfig; import io.agora.beautyapi.faceunity.CaptureMode; import io.agora.beautyapi.faceunity.Config; @@ -79,7 +72,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat initRtcEngine(); - faceUnityBeautyAPI.initialize(new Config(requireContext(), rtcEngine, FaceUnityBeautySDK.INSTANCE.getFuRenderKit(), null, CaptureMode.Agora, 0, false, new CameraConfig())); + faceUnityBeautyAPI.initialize(new Config(requireContext(), rtcEngine, FURenderKit.getInstance(), null, CaptureMode.Agora, 0, false, new CameraConfig())); faceUnityBeautyAPI.enable(true); joinChannel(); mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) -> { @@ -109,33 +102,27 @@ protected void onBackPressed() { private void initVideoView() { mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> { - faceUnityBeautyAPI.setBeautyPreset(isChecked ? BeautyPreset.DEFAULT : BeautyPreset.CUSTOM); + FaceUnityBeautySDK.INSTANCE.getBeautyConfig().setWhiten( + isChecked ? 
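reset() and resume() below both lean on the custom setters: reset writes the defaults back, while resume assigns every property to itself so the setter side effects replay into a freshly initialized render manager. The idiom in isolation:

```kotlin
// Self-assignment is a no-op for the backing field but re-runs the setter,
// which pushes the cached value into the (re)created engine.
class ReplayConfig {
    var whiten = 0.5f
        set(value) {
            field = value
            push("whiten", value)
        }

    private fun push(key: String, value: Float) { /* forward to the effect engine */ }

    fun reset() {
        whiten = 0.5f     // restore the default and push it
    }

    fun resume() {
        whiten = whiten   // replay the current value after re-init
    }
}
```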
1.0f : 0.0f + ); }); mBinding.cbMakeup.setOnCheckedChangeListener((buttonView, isChecked) -> { - FURenderKit fuRenderKit = FaceUnityBeautySDK.INSTANCE.getFuRenderKit(); - if (isChecked) { - SimpleMakeup makeup = new SimpleMakeup(new FUBundleData("graphics" + File.separator + "face_makeup.bundle")); - makeup.setCombinedConfig(new FUBundleData("beauty_faceunity/makeup/naicha.bundle")); - makeup.setMakeupIntensity(1.0f); - fuRenderKit.setMakeup(makeup); - } else { - fuRenderKit.setMakeup(null); + if(isChecked){ + FaceUnityBeautySDK.INSTANCE.getBeautyConfig().setMakeUp( + new FaceUnityBeautySDK.MakeUpItem( + "makeup/diadiatu.bundle", + 1.0f + ) + ); + }else{ + FaceUnityBeautySDK.INSTANCE.getBeautyConfig().setMakeUp(null); } }); mBinding.cbSticker.setOnCheckedChangeListener((buttonView, isChecked) -> { - FURenderKit fuRenderKit = FaceUnityBeautySDK.INSTANCE.getFuRenderKit(); if (isChecked) { - Prop prop = new Sticker(new FUBundleData("beauty_faceunity/sticker/fu_zh_fenshu.bundle")); - fuRenderKit.getPropContainer().replaceProp(null, prop); + FaceUnityBeautySDK.INSTANCE.getBeautyConfig().setSticker("sticker/sdlu.bundle"); } else { - fuRenderKit.getPropContainer().removeAllProp(); - } - }); - mBinding.cbBodyBeauty.setOnCheckedChangeListener((buttonView, isChecked) -> { - FURenderKit fuRenderKit = FaceUnityBeautySDK.INSTANCE.getFuRenderKit(); - BodyBeauty bodyBeauty = fuRenderKit.getBodyBeauty(); - if (bodyBeauty != null) { - bodyBeauty.setEnable(isChecked); + FaceUnityBeautySDK.INSTANCE.getBeautyConfig().setSticker(null); } }); mBinding.ivCamera.setOnClickListener(v -> { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt index a9d041238..57bb76b5f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt @@ -1,61 +1,58 @@ package io.agora.api.example.examples.advanced.beauty import android.content.Context +import android.text.TextUtils import android.util.Log import com.faceunity.core.callback.OperateCallback import com.faceunity.core.entity.FUBundleData import com.faceunity.core.enumeration.FUAITypeEnum import com.faceunity.core.faceunity.FUAIKit -import com.faceunity.core.faceunity.FURenderConfig +import com.faceunity.core.faceunity.FURenderConfig.OPERATE_SUCCESS_AUTH import com.faceunity.core.faceunity.FURenderKit import com.faceunity.core.faceunity.FURenderManager import com.faceunity.core.model.facebeauty.FaceBeauty +import com.faceunity.core.model.makeup.SimpleMakeup +import com.faceunity.core.model.prop.sticker.Sticker import com.faceunity.core.utils.FULogger import com.faceunity.wrapper.faceunity +import io.agora.beautyapi.faceunity.FaceUnityBeautyAPI import java.io.File -import java.util.concurrent.Executors -/** - * Face unity beauty s d k - * - * @constructor Create empty Face unity beauty s d k - */ object FaceUnityBeautySDK { - private val TAG = "FaceUnityBeautySDK" - private val fuAIKit = FUAIKit.getInstance() - - /** - * Fu render kit - */ - val fuRenderKit = FURenderKit.getInstance() + private const val TAG = "FaceUnityBeautySDK" /* AI道具*/ - private val BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor.bundle" - private val BUNDLE_AI_HUMAN = "model" + File.separator + "ai_human_processor.bundle" + private const val BUNDLE_AI_FACE 
= "model/ai_face_processor.bundle" + private const val BUNDLE_AI_HUMAN = "model/ai_human_processor.bundle" + + // 美颜配置 + val beautyConfig = BeautyConfig() + + private var beautyAPI: FaceUnityBeautyAPI? = null - private val workerThread = Executors.newSingleThreadExecutor() + fun initBeauty(context: Context): Boolean { + val auth = try { + getAuth() + } catch (e: Exception) { + Log.w(TAG, e) + return false + } ?: return false - /** - * Init beauty - * - * @param context - */ - fun initBeauty(context: Context) { FURenderManager.setKitDebug(FULogger.LogLevel.TRACE) FURenderManager.setCoreDebug(FULogger.LogLevel.ERROR) - FURenderManager.registerFURender(context, getAuth(), object : OperateCallback { + FURenderManager.registerFURender(context, auth, object : OperateCallback { override fun onSuccess(code: Int, msg: String) { Log.i(TAG, "FURenderManager onSuccess -- code=$code, msg=$msg") - if (code == FURenderConfig.OPERATE_SUCCESS_AUTH) { + if (code == OPERATE_SUCCESS_AUTH) { faceunity.fuSetUseTexAsync(1) - workerThread.submit { - fuAIKit.loadAIProcessor(BUNDLE_AI_FACE, FUAITypeEnum.FUAITYPE_FACEPROCESSOR) - fuAIKit.loadAIProcessor( - BUNDLE_AI_HUMAN, - FUAITypeEnum.FUAITYPE_HUMAN_PROCESSOR - ) - } + FUAIKit.getInstance() + .loadAIProcessor(BUNDLE_AI_FACE, FUAITypeEnum.FUAITYPE_FACEPROCESSOR) + FUAIKit.getInstance().loadAIProcessor( + BUNDLE_AI_HUMAN, + FUAITypeEnum.FUAITYPE_HUMAN_PROCESSOR + ) + } } @@ -63,118 +60,264 @@ object FaceUnityBeautySDK { Log.e(TAG, "FURenderManager onFail -- code=$errCode, msg=$errMsg") } }) + return true } - /** - * Release - * - */ - fun release() { + fun unInitBeauty() { + beautyAPI = null + beautyConfig.reset() + FUAIKit.getInstance().releaseAllAIProcessor() FURenderKit.getInstance().release() } - /** - * Get auth - * - * @return - */ - private fun getAuth(): ByteArray { - try { - val authpack = Class.forName("io.agora.api.example.examples.advanced.beauty.authpack") - val aMethod = authpack.getDeclaredMethod("A") - aMethod.isAccessible = true - val authValue = aMethod.invoke(null) as? ByteArray - return authValue ?: ByteArray(0) - } catch (e: Exception){ - Log.e(TAG, "getAuth >> error : $e") - } - return ByteArray(0) + private fun getAuth(): ByteArray? { + val authpack = Class.forName("io.agora.api.example.examples.advanced.beauty.authpack") + val aMethod = authpack.getDeclaredMethod("A") + aMethod.isAccessible = true + return aMethod.invoke(null) as? ByteArray } - /** - * Set beauty - * - * @param smooth - * @param whiten - * @param thinFace - * @param enlargeEye - * @param redden - * @param shrinkCheekbone - * @param shrinkJawbone - * @param whiteTeeth - * @param hairlineHeight - * @param narrowNose - * @param mouthSize - * @param chinLength - * @param brightEye - * @param darkCircles - * @param nasolabialFolds - */ - fun setBeauty( - smooth: Double? = null, - whiten: Double? = null, - thinFace: Double? = null, - enlargeEye: Double? = null, - redden: Double? = null, - shrinkCheekbone: Double? = null, - shrinkJawbone: Double? = null, - whiteTeeth: Double? = null, - hairlineHeight: Double? = null, - narrowNose: Double? = null, - mouthSize: Double? = null, - chinLength: Double? = null, - brightEye: Double? = null, - darkCircles: Double? = null, - nasolabialFolds: Double? 
= null - ) { - if (fuRenderKit.faceBeauty == null) { - fuRenderKit.faceBeauty = - FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle")) - } + internal fun setBeautyAPI(beautyAPI: FaceUnityBeautyAPI) { + this.beautyAPI = beautyAPI + } + + private fun runOnBeautyThread(run: () -> Unit) { + beautyAPI?.runOnProcessThread(run) ?: run.invoke() + } + + + class BeautyConfig { + + private val fuRenderKit = FURenderKit.getInstance() + + // 美颜配置 + private val faceBeauty: FaceBeauty + get() { + var faceBeauty = fuRenderKit.faceBeauty + if (faceBeauty == null) { + faceBeauty = + FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle")) + fuRenderKit.faceBeauty = faceBeauty + } + return faceBeauty + } + + + // 资源基础路径 + private val resourceBase = "beauty_faceunity" + // 磨皮 - smooth?.let { fuRenderKit.faceBeauty?.blurIntensity = it * 6 } + var smooth = 0.65f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.blurIntensity = value * 6.0 + } + } // 美白 - whiten?.let { fuRenderKit.faceBeauty?.colorIntensity = it * 2 } + var whiten = 0.65f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.colorIntensity = value * 2.0 + } + } // 瘦脸 - thinFace?.let { fuRenderKit.faceBeauty?.cheekThinningIntensity = it } + var thinFace = 0.3f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.cheekThinningIntensity = value.toDouble() + } + } // 大眼 - enlargeEye?.let { fuRenderKit.faceBeauty?.eyeEnlargingIntensity = it } + var enlargeEye = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.eyeEnlargingIntensity = value.toDouble() + } + } // 红润 - redden?.let { fuRenderKit.faceBeauty?.redIntensity = it * 2 } + var redden = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.redIntensity = value * 2.0 + } + } + + // 五官立体 + var faceThree = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.faceThreeIntensity = value.toDouble() + } + } // 瘦颧骨 - shrinkCheekbone?.let { fuRenderKit.faceBeauty?.cheekBonesIntensity = it } + var shrinkCheekbone = 0.3f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.cheekBonesIntensity = value.toDouble() + } + } // 下颌骨 - shrinkJawbone?.let { fuRenderKit.faceBeauty?.lowerJawIntensity = it } + var shrinkJawbone = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.lowerJawIntensity = value.toDouble() + } + } // 美牙 - whiteTeeth?.let { fuRenderKit.faceBeauty?.toothIntensity = it } + var whiteTeeth = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.toothIntensity = value.toDouble() + } + } // 额头 - hairlineHeight?.let { fuRenderKit.faceBeauty?.forHeadIntensity = it } + var hairlineHeight = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.forHeadIntensity = value.toDouble() + } + } // 瘦鼻 - narrowNose?.let { fuRenderKit.faceBeauty?.noseIntensity = it } + var narrowNose = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.noseIntensity = value.toDouble() + } + } // 嘴形 - mouthSize?.let { fuRenderKit.faceBeauty?.mouthIntensity = it } + var mouthSize = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.mouthIntensity = value.toDouble() + } + } // 下巴 - chinLength?.let { fuRenderKit.faceBeauty?.chinIntensity = it } + var chinLength = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.chinIntensity = value.toDouble() + } + } // 亮眼 - brightEye?.let { fuRenderKit.faceBeauty?.eyeBrightIntensity = it } + var brightEye = 0.0f + 
set(value) { + field = value + runOnBeautyThread { + faceBeauty.eyeBrightIntensity = value.toDouble() + } + } // 祛黑眼圈 - darkCircles?.let { fuRenderKit.faceBeauty?.removePouchIntensity = it } + var darkCircles = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.removePouchIntensity = value.toDouble() + } + } // 祛法令纹 - nasolabialFolds?.let { fuRenderKit.faceBeauty?.removeLawPatternIntensity = it } + var nasolabialFolds = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.removeLawPatternIntensity = value.toDouble() + } + } + + // 锐化 + var sharpen = 0.0f + set(value) { + field = value + runOnBeautyThread { + faceBeauty.sharpenIntensity = value.toDouble() + } + } + + // 贴纸 + var sticker: String? = null + set(value) { + field = value + runOnBeautyThread { + fuRenderKit.propContainer.removeAllProp() + if (!TextUtils.isEmpty(value)) { + val prop = Sticker(FUBundleData("$resourceBase/$sticker")) + fuRenderKit.propContainer.addProp(prop) + } + } + } + + // 美妆 + var makeUp: MakeUpItem? = null + set(value) { + field = value + runOnBeautyThread { + if (value == null) { + fuRenderKit.makeup = null + } else { + val makeup = + SimpleMakeup(FUBundleData("graphics" + File.separator + "face_makeup.bundle")) + makeup.setCombinedConfig(FUBundleData("$resourceBase/${value.path}")) + makeup.makeupIntensity = value.intensity.toDouble() + fuRenderKit.makeup = makeup + } + } + } + + + fun reset() { + smooth = 0.65f + whiten = 0.65f + thinFace = 0.3f + enlargeEye = 0.0f + redden = 0.0f + shrinkCheekbone = 0.3f + shrinkJawbone = 0.0f + whiteTeeth = 0.0f + hairlineHeight = 0.0f + narrowNose = 0.0f + mouthSize = 0.0f + chinLength = 0.0f + brightEye = 0.0f + darkCircles = 0.0f + nasolabialFolds = 0.0f + faceThree = 0.0f + + makeUp = null + sticker = null + } } + + data class MakeUpItem( + val path: String, + val intensity: Float + ) + + } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeauty.java index fc4bb8b41..03b69da34 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeauty.java @@ -12,8 +12,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.softsugar.stmobile.params.STEffectBeautyType; - import java.io.File; import java.io.IOException; import java.util.Locale; @@ -25,7 +23,6 @@ import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.databinding.FragmentBeautyScenetimeBinding; import io.agora.api.example.utils.TokenUtils; -import io.agora.beautyapi.sensetime.BeautyPreset; import io.agora.beautyapi.sensetime.CameraConfig; import io.agora.beautyapi.sensetime.CaptureMode; import io.agora.beautyapi.sensetime.Config; @@ -129,20 +126,29 @@ protected void onBackPressed() { private void initVideoView() { mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> { - senseTimeBeautyAPI.setBeautyPreset(isChecked ? BeautyPreset.DEFAULT : BeautyPreset.CUSTOM); + SenseTimeBeautySDK.INSTANCE.getBeautyConfig().setWhiten( + isChecked ? 
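The FaceUnity config keeps UI values on a 0..1 float scale and rescales on write, because FURenderKit intensities are doubles with wider ranges (blur 0..6, color and red 0..2 in this hunk). A small helper capturing the mapping; the maxima are taken from the multipliers above, not from SDK documentation:

```kotlin
// Map a 0..1 slider value onto a FaceUnity intensity range.
fun uiToIntensity(uiValue: Float, max: Double): Double =
    (uiValue.toDouble() * max).coerceIn(0.0, max)

// e.g. faceBeauty.blurIntensity = uiToIntensity(smooth, 6.0)
```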
1.0f: 0.0f + ); }); mBinding.cbMakeup.setOnCheckedChangeListener((buttonView, isChecked) -> { if (isChecked) { - SenseTimeBeautySDK.INSTANCE.setMakeUpItem(requireContext(), STEffectBeautyType.EFFECT_BEAUTY_MAKEUP_ALL, "makeup_lip" + File.separator + "12自然.zip", 1.0f); + SenseTimeBeautySDK.INSTANCE.getBeautyConfig().setMakeUp(new SenseTimeBeautySDK.MakeUpItem( + requireContext(), + "style_lightly" + File.separator + "hunxue.zip", + 1.0f + )); } else { - SenseTimeBeautySDK.INSTANCE.setMakeUpItem(requireContext(), STEffectBeautyType.EFFECT_BEAUTY_MAKEUP_ALL, "", 0.0f); + SenseTimeBeautySDK.INSTANCE.getBeautyConfig().setMakeUp(null); } }); mBinding.cbSticker.setOnCheckedChangeListener((buttonView, isChecked) -> { if (isChecked) { - SenseTimeBeautySDK.INSTANCE.setStickerItem(requireContext(), "sticker_face_shape" + File.separator + "ShangBanLe.zip", true); + SenseTimeBeautySDK.INSTANCE.getBeautyConfig().setSticker(new SenseTimeBeautySDK.StickerItem( + requireContext(), + "sticker_face_shape" + File.separator + "ShangBanLe.zip" + )); } else { - SenseTimeBeautySDK.INSTANCE.cleanSticker(); + SenseTimeBeautySDK.INSTANCE.getBeautyConfig().setSticker(null); } }); mBinding.ivCamera.setOnClickListener(v -> { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt index fe28d394c..1e8a3bb4d 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt @@ -1,362 +1,523 @@ package io.agora.api.example.examples.advanced.beauty import android.content.Context +import android.text.TextUtils import android.util.Log -import com.softsugar.stmobile.STCommonNative import com.softsugar.stmobile.STMobileAuthentificationNative import com.softsugar.stmobile.STMobileEffectNative import com.softsugar.stmobile.STMobileEffectParams import com.softsugar.stmobile.STMobileHumanActionNative import com.softsugar.stmobile.params.STEffectBeautyType -import com.softsugar.stmobile.params.STHumanActionParamsType -import io.agora.api.example.utils.FileKtUtils -import java.util.concurrent.Executors - -/** - * Sense time beauty s d k - * - * @constructor Create empty Sense time beauty s d k - */ +import io.agora.api.example.utils.FileUtils +import io.agora.beautyapi.sensetime.SenseTimeBeautyAPI + object SenseTimeBeautySDK { - private val TAG = "SenseTimeBeautySDK" + private const val TAG = "SenseTimeBeautySDK" private val resourcePath = "beauty_sensetime" private val humanActionCreateConfig = 0 - private val packageMap = mutableMapOf() - - private val MODEL_106 = "models/M_SenseME_Face_Video_Template_p_3.9.0.3.model" // 106 - private val MODEL_FACE_EXTRA = "models/M_SenseME_Face_Extra_Advanced_Template_p_2.0.0.model" // 282 - private val MODEL_AVATAR_HELP = "models/M_SenseME_Avatar_Help_p_2.3.7.model" // avatar人脸驱动 - private val MODEL_LIPS_PARSING = "models/M_SenseME_MouthOcclusion_p_1.3.0.1.model" // 嘴唇分割 - private val MODEL_HAND = "models/M_SenseME_Hand_p_6.0.8.1.model" // 手势 - private val MODEL_SEGMENT = "models/M_SenseME_Segment_Figure_p_4.14.1.1.model" // 前后背景分割 - private val MODEL_SEGMENT_HAIR = "models/M_SenseME_Segment_Hair_p_4.4.0.model" // 头发分割 - private val MODEL_FACE_OCCLUSION = "models/M_SenseME_FaceOcclusion_p_1.0.7.1.model" // 妆容遮挡 - private val MODEL_SEGMENT_SKY = 
"models/M_SenseME_Segment_Sky_p_1.1.0.1.model" // 天空分割 - // private val MODEL_SEGMENT_SKIN = "models/M_SenseME_Segment_Skin_p_1.0.1.1.model" // 皮肤分割 - private val MODEL_3DMESH = "models/M_SenseME_3DMesh_Face2396pt_280kpts_Ear_p_1.1.0v2.model" // 3DMesh - // private val MODEL_HEAD_P_EAR = "models/M_SenseME_Ear_p_1.0.1.1.model" // 搭配 mesh 耳朵模型 - private val MODEL_360HEAD_INSTANCE = "models/M_SenseME_3Dmesh_360Head2396pt_p_1.0.0.1.model" // 360度人头mesh - private val MODEL_FOOT = "models/M_SenseME_Foot_p_2.10.7.model" // 鞋子检测模型 - private val MODEL_PANT = "models/M_SenseME_Segment_Trousers_p_1.1.10.model" // 裤腿的检测 - private val MODEL_WRIST = "models/M_SenseME_Wrist_p_1.4.0.model" // 试表 - private val MODEL_CLOTH = "models/M_SenseME_Segment_Clothes_p_1.0.2.2.model" // 衣服分割 - private val MODEL_HEAD_INSTANCE = "models/M_SenseME_Segment_Head_Instance_p_1.1.0.1.model" // 实例分割版本 - // private val MODEL_HEAD_P_INSTANCE = "models/M_SenseME_Head_p_1.3.0.1.model" // 360度人头-头部模型 - private val MODEL_NAIL = "models/M_SenseME_Nail_p_2.4.0.model" // 指甲检测 - - private val workerThread = Executors.newSingleThreadExecutor() - - /** - * Mobile effect native - */ - val mobileEffectNative = STMobileEffectNative() - - /** - * Human action native - */ - val humanActionNative = STMobileHumanActionNative() - - - /** - * Init beauty s d k - * - * @param context - */ - fun initBeautySDK(context: Context){ - workerThread.submit { - checkLicense(context) + + private const val MODEL_106 = "models/M_SenseME_Face_Video_Template_p_3.9.0.3.model" // 106 + // private const val MODEL_FACE_EXTRA = "models/M_SenseME_Face_Extra_Advanced_Template_p_2.0.0.model" // 282 + // private const val MODEL_AVATAR_HELP = "models/M_SenseME_Avatar_Help_p_2.3.7.model" // avatar人脸驱动 + // private const val MODEL_LIPS_PARSING = "models/M_SenseME_MouthOcclusion_p_1.3.0.1.model" // 嘴唇分割 + // private const val MODEL_HAND = "models/M_SenseME_Hand_p_6.0.8.1.model" // 手势 + // private const val MODEL_SEGMENT = "models/M_SenseME_Segment_Figure_p_4.14.1.1.model" // 前后背景分割 + // private const val MODEL_SEGMENT_HAIR = "models/M_SenseME_Segment_Hair_p_4.4.0.model" // 头发分割 + // private const val MODEL_FACE_OCCLUSION = "models/M_SenseME_FaceOcclusion_p_1.0.7.1.model" // 妆容遮挡 + // private const val MODEL_SEGMENT_SKY = "models/M_SenseME_Segment_Sky_p_1.1.0.1.model" // 天空分割 + // private const val MODEL_SEGMENT_SKIN = "models/M_SenseME_Segment_Skin_p_1.0.1.1.model" // 皮肤分割 + // private const val MODEL_3DMESH = "models/M_SenseME_3DMesh_Face2396pt_280kpts_Ear_p_1.1.0v2.model" // 3DMesh + // private const val MODEL_HEAD_P_EAR = "models/M_SenseME_Ear_p_1.0.1.1.model" // 搭配 mesh 耳朵模型 + // private const val MODEL_360HEAD_INSTANCE = "models/M_SenseME_3Dmesh_360Head2396pt_p_1.0.0.1.model" // 360度人头mesh + // private const val MODEL_FOOT = "models/M_SenseME_Foot_p_2.10.7.model" // 鞋子检测模型 + // private const val MODEL_PANT = "models/M_SenseME_Segment_Trousers_p_1.1.10.model" // 裤腿的检测 + // private const val MODEL_WRIST = "models/M_SenseME_Wrist_p_1.4.0.model" // 试表 + // private const val MODEL_CLOTH = "models/M_SenseME_Segment_Clothes_p_1.0.2.2.model" // 衣服分割 + // private const val MODEL_HEAD_INSTANCE = "models/M_SenseME_Segment_Head_Instance_p_1.1.0.1.model" // 实例分割版本 + // private const val MODEL_HEAD_P_INSTANCE = "models/M_SenseME_Head_p_1.3.0.1.model" // 360度人头-头部模型 + // private const val MODEL_NAIL = "models/M_SenseME_Nail_p_2.4.0.model" // 指甲检测 + + private var stickerPackageId = 0 + + // 特效句柄 + private var _mobileEffectNative: STMobileEffectNative? 
= null + val mobileEffectNative + get() = _mobileEffectNative ?: throw RuntimeException("Please initMobileEffect firstly!") + + // 人脸识别句柄 + private var _humanActionNative: STMobileHumanActionNative? = null + val humanActionNative + get() = _humanActionNative ?: throw RuntimeException("Please initBeautySDK firstly!") + + // 美颜配置 + val beautyConfig = BeautyConfig() + + private var beautyAPI: SenseTimeBeautyAPI? = null + + fun initBeautySDK(context: Context): Boolean { + if (checkLicense(context)) { initHumanAction(context) + return true } + return false } - /** - * Init mobile effect - * - * @param context - */ - fun initMobileEffect(context: Context){ - val result = - mobileEffectNative.createInstance(context, STMobileEffectNative.EFFECT_CONFIG_NONE) - mobileEffectNative.setParam(STMobileEffectParams.EFFECT_PARAM_QUATERNION_SMOOTH_FRAME, 5f) - Log.d(TAG, "SenseTime >> STMobileEffectNative create result : $result") + fun unInitBeautySDK() { + beautyAPI = null + unInitHumanActionNative() + beautyConfig.reset() } - /** - * Un init mobile effect - * - */ - fun unInitMobileEffect(){ - mobileEffectNative.destroyInstance() + fun initMobileEffect(context: Context) { + if (_mobileEffectNative != null) { + return + } + _mobileEffectNative = STMobileEffectNative() + val result = + _mobileEffectNative?.createInstance(context, STMobileEffectNative.EFFECT_CONFIG_NONE) + _mobileEffectNative?.setParam(STMobileEffectParams.EFFECT_PARAM_QUATERNION_SMOOTH_FRAME, 5f) + Log.d(TAG, "SenseTime >> STMobileEffectNative create result : $result") } - /** - * Release - * - */ - fun release() { - mobileEffectNative.destroyInstance() + fun unInitMobileEffect() { + _mobileEffectNative?.destroyInstance() + _mobileEffectNative = null } - private fun checkLicense(context: Context) { - val license = FileKtUtils.getAssetsString( + private fun checkLicense(context: Context): Boolean { + val license = FileUtils.getAssetsString( context, "$resourcePath/license/SenseME.lic" ) + if(TextUtils.isEmpty(license)){ + return false + } val activeCode = STMobileAuthentificationNative.generateActiveCodeFromBuffer( context, license, license.length ) - val success = activeCode.isNotEmpty() - if (success) { - Log.d(TAG, "SenseTime >> checkLicense successfully!") - } else { - Log.e(TAG, "SenseTime >> checkLicense failed!") - } + Log.d(TAG, "SenseTime >> checkLicense successfully! 
activeCode=$activeCode") + return true } - private fun initHumanAction(context: Context){ + private fun initHumanAction(context: Context) { + if (_humanActionNative != null) { + return + } + _humanActionNative = STMobileHumanActionNative() val assets = context.assets - val result = humanActionNative.createInstanceFromAssetFile( - "$resourcePath/$MODEL_106", - humanActionCreateConfig, - assets - ) + val result = _humanActionNative?.createInstanceFromAssetFile("$resourcePath/$MODEL_106", humanActionCreateConfig, assets) Log.d(TAG, "SenseTime >> STMobileHumanActionNative create result : $result") - if(result != 0){ + if (result != 0) { return } - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_HAND", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_SEGMENT", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_FACE_EXTRA", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_SEGMENT_HAIR", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_LIPS_PARSING", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_FACE_OCCLUSION", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_SEGMENT_SKY", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_AVATAR_HELP", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_FOOT", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_PANT", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_3DMESH", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_WRIST", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_CLOTH", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_HEAD_INSTANCE", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_360HEAD_INSTANCE", assets) - humanActionNative.addSubModelFromAssetFile("$resourcePath/$MODEL_NAIL", assets) + // 其他模型配置 + // _humanActionNative?.addSubModelFromAssetFile("$resourcePath/$MODEL_FACE_EXTRA", assets) // 背景分割羽化程度[0,1](默认值0.35),0 完全不羽化,1羽化程度最高,在strenth较小时,羽化程度基本不变.值越大,前景与背景之间的过度边缘部分越宽. 
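checkLicense now returns a Boolean so initBeautySDK can refuse to build the human-action handle without a license. The guard reads the .lic file from assets and asks the auth native for an activation code; a condensed version using only the calls that appear in this hunk:

```kotlin
import android.content.Context
import android.text.TextUtils
import android.util.Log
import com.softsugar.stmobile.STMobileAuthentificationNative
import io.agora.api.example.utils.FileUtils

// Empty license file => fail; otherwise generate the activation code and
// report success, as checkLicense does in this diff.
fun hasValidLicense(context: Context, resourcePath: String): Boolean {
    val license = FileUtils.getAssetsString(context, "$resourcePath/license/SenseME.lic")
    if (TextUtils.isEmpty(license)) {
        return false
    }
    val activeCode = STMobileAuthentificationNative.generateActiveCodeFromBuffer(
        context, license, license.length
    )
    Log.d("SenseTimeBeautySDK", "checkLicense activeCode=$activeCode")
    return true
}
```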
- humanActionNative.setParam(STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_BACKGROUND_BLUR_STRENGTH, 0.35f) + // _humanActionNative?.setParam( + // STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_BACKGROUND_BLUR_STRENGTH, + // 0.35f + // ) // 设置face mesh结果输出坐标系,(0: 屏幕坐标系, 1:3d世界坐标系, 2:3d摄像机坐标系,是摄像头透视投影坐标系, 原点在摄像机 默认是0) - humanActionNative.setParam(STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_FACE_MESH_OUTPUT_FORMAT, 1.0f) + // _humanActionNative?.setParam( + // STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_FACE_MESH_OUTPUT_FORMAT, + // 1.0f + // ) // 设置mesh渲染模式 - humanActionNative.setParam(STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_MESH_MODE, STCommonNative.MESH_CONFIG.toFloat()) + // _humanActionNative?.setParam( + // STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_MESH_MODE, + // STCommonNative.MESH_CONFIG.toFloat() + // ) // 设置人头实例分割 - humanActionNative.setParam(STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_HEAD_SEGMENT_INSTANCE, 1.0f) + // _humanActionNative?.setParam( + // STHumanActionParamsType.ST_HUMAN_ACTION_PARAM_HEAD_SEGMENT_INSTANCE, + // 1.0f + // ) } - /** - * Set make up item - * - * @param context - * @param type - * @param path - * @param strength - */ - fun setMakeUpItem(context: Context, type: Int, path: String = "", strength: Float = 1.0f) { - if (path.isNotEmpty()) { - val assets = context.assets - mobileEffectNative.setBeautyFromAssetsFile(type, "$resourcePath/$path", assets) - mobileEffectNative.setBeautyStrength(type, strength) - } else { - mobileEffectNative.setBeauty(type, null) - } - } - /** - * Set sticker item - * - * @param context - * @param path - * @param attach - */ - fun setStickerItem(context: Context, path: String, attach: Boolean) { - if(attach){ - val assets = context.assets - packageMap[path] = mobileEffectNative.changePackageFromAssetsFile("$resourcePath/$path", assets) - }else{ - packageMap.remove(path)?.let { - mobileEffectNative.removeEffect(it) - } - } + private fun unInitHumanActionNative() { + _humanActionNative?.destroyInstance() + _humanActionNative = null } - /** - * Clean sticker - * - */ - fun cleanSticker(){ - packageMap.values.forEach { - mobileEffectNative.removeEffect(it) - } - packageMap.clear() - } - /** - * Set beauty - * - * @param smooth - * @param whiten - * @param thinFace - * @param enlargeEye - * @param redden - * @param shrinkCheekbone - * @param shrinkJawbone - * @param whiteTeeth - * @param hairlineHeight - * @param narrowNose - * @param mouthSize - * @param chinLength - * @param brightEye - * @param darkCircles - * @param nasolabialFolds - * @param saturation - * @param contrast - * @param sharpen - * @param clear - */ - fun setBeauty( - smooth: Float? = null, - whiten: Float? = null, - thinFace: Float? = null, - enlargeEye: Float? = null, - redden: Float? = null, - shrinkCheekbone: Float? = null, - shrinkJawbone: Float? = null, - whiteTeeth: Float? = null, - hairlineHeight: Float? = null, - narrowNose: Float? = null, - mouthSize: Float? = null, - chinLength: Float? = null, - brightEye: Float? = null, - darkCircles: Float? = null, - nasolabialFolds: Float? = null, - - saturation: Float? = null, - contrast: Float? = null, - sharpen: Float? = null, - clear: Float? 
= null - ){ - val effectNative = mobileEffectNative - // 锐化 - sharpen?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_TONE_SHARPEN, it) } + internal fun setBeautyAPI(beautyAPI: SenseTimeBeautyAPI){ + this.beautyAPI = beautyAPI + beautyConfig.resume() + } - // 清晰度 - clear?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_TONE_CLEAR, it) } + private fun runOnBeautyThread(run: () -> Unit) { + beautyAPI?.runOnProcessThread(run) ?: run.invoke() + } + open class BeautyConfig { // 磨皮 - smooth?.let { - effectNative.setBeautyMode( - STEffectBeautyType.EFFECT_BEAUTY_BASE_FACE_SMOOTH, - STEffectBeautyType.SMOOTH2_MODE - ) - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_BASE_FACE_SMOOTH, - it - ) - } + var smooth = 0.75f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyMode( + STEffectBeautyType.EFFECT_BEAUTY_BASE_FACE_SMOOTH, + STEffectBeautyType.SMOOTH2_MODE + ) + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_BASE_FACE_SMOOTH, + value + ) + } + } // 美白 - whiten?.let { - effectNative.setBeautyMode( - STEffectBeautyType.EFFECT_BEAUTY_BASE_WHITTEN, - STEffectBeautyType.WHITENING3_MODE - ) - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_BASE_WHITTEN, - it - ) - } + var whiten = 0.75f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyMode( + STEffectBeautyType.EFFECT_BEAUTY_BASE_WHITTEN, + STEffectBeautyType.WHITENING3_MODE + ) + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_BASE_WHITTEN, + value + ) + } + } // 瘦脸 - thinFace?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_THIN_FACE, it) } + var thinFace = 0.3f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_THIN_FACE, + value + ) + } + } + // 大眼 - enlargeEye?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE, it) } + var enlargeEye = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE, + value + ) + } + } // 红润 - redden?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_BASE_REDDEN, it) } + var redden = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_BASE_REDDEN, + value + ) + } + } + // 瘦颧骨 - shrinkCheekbone?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_SHRINK_CHEEKBONE, it) } + var shrinkCheekbone = 0.3f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_SHRINK_CHEEKBONE, + value + ) + } + } // 下颌骨 - shrinkJawbone?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_SHRINK_JAWBONE, it) } + var shrinkJawbone = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_SHRINK_JAWBONE, + value + ) + } + } // 美牙 - whiteTeeth?.let { - effectNative.setBeautyStrength( - 
STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_WHITE_TEETH, - it - ) - } + var whiteTeeth = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_WHITE_TEETH, + value + ) + } + } // 额头 - hairlineHeight?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_HAIRLINE_HEIGHT, it) } + var hairlineHeight = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_HAIRLINE_HEIGHT, + value + ) + } + } // 瘦鼻 - narrowNose?.let { - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_NARROW_NOSE, - it - ) - } + var narrowNose = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_NARROW_NOSE, + value + ) + } + } // 嘴形 - mouthSize?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_MOUTH_SIZE, it) } + var mouthSize = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_MOUTH_SIZE, + value + ) + } + } + // 下巴 - chinLength?.let { - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_CHIN_LENGTH,it - ) - } + var chinLength = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_CHIN_LENGTH, value + ) + } + } // 亮眼 - brightEye?.let { - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_BRIGHT_EYE, - it - ) - } + var brightEye = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_BRIGHT_EYE, + value + ) + } + } // 祛黑眼圈 - darkCircles?.let { effectNative.setBeautyStrength(STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_REMOVE_DARK_CIRCLES, it) } + var darkCircles = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_REMOVE_DARK_CIRCLES, + value + ) + } + } // 祛法令纹 - nasolabialFolds?.let { - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_REMOVE_NASOLABIAL_FOLDS, - it - ) - } + var nasolabialFolds = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_PLASTIC_REMOVE_NASOLABIAL_FOLDS, + value + ) + } + } // 饱和度 - saturation?.let { - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_TONE_SATURATION, - it - ) - } + var saturation = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_TONE_SATURATION, + value + ) + } + } // 对比度 - contrast?.let { - effectNative.setBeautyStrength( - STEffectBeautyType.EFFECT_BEAUTY_TONE_CONTRAST, - it - ) + var contrast = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + 
STEffectBeautyType.EFFECT_BEAUTY_TONE_CONTRAST, + value + ) + } + } + + // 锐化 + var sharpen = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_TONE_SHARPEN, + value + ) + } + } + + + // 清晰度 + var clear = 0.0f + set(value) { + field = value + val effectNative = _mobileEffectNative ?: return + runOnBeautyThread { + effectNative.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_TONE_CLEAR, + value + ) + } + } + + // 美妆 + var makeUp: MakeUpItem? = null + set(value) { + field = value + runOnBeautyThread { + if (value == null) { + _mobileEffectNative?.setBeauty( + STEffectBeautyType.EFFECT_BEAUTY_MAKEUP_ALL, + null + ) + } else { + val assets = value.context.assets + _mobileEffectNative?.setBeautyFromAssetsFile( + STEffectBeautyType.EFFECT_BEAUTY_MAKEUP_ALL, + "$resourcePath/${value.path}", + assets + ) + _mobileEffectNative?.setBeautyStrength( + STEffectBeautyType.EFFECT_BEAUTY_MAKEUP_ALL, + value.strength + ) + } + } + } + + // 贴纸 + var sticker: StickerItem? = null + set(value) { + field = value + runOnBeautyThread { + if (value == null) { + if (stickerPackageId > 0) { + _mobileEffectNative?.removeEffect(stickerPackageId) + stickerPackageId = 0 + } + } else { + stickerPackageId = _mobileEffectNative?.changePackageFromAssetsFile( + "$resourcePath/${value.path}", + value.context.assets + ) ?: 0 + } + } + } + + internal fun reset() { + smooth = 0.75f + whiten = 0.75f + thinFace = 0.3f + enlargeEye = 0.0f + sharpen = 0.0f + clear = 0.0f + redden = 0.0f + shrinkCheekbone = 0.3f + shrinkJawbone = 0.0f + whiteTeeth = 0.0f + hairlineHeight = 0.0f + narrowNose = 0.0f + mouthSize = 0.0f + chinLength = 0.0f + brightEye = 0.0f + darkCircles = 0.0f + nasolabialFolds = 0.0f + saturation = 0.0f + contrast = 0.0f + + makeUp = null + sticker = null } + internal fun resume() { + smooth = smooth + whiten = whiten + thinFace = thinFace + enlargeEye = enlargeEye + sharpen = sharpen + clear = clear + redden = redden + shrinkCheekbone = shrinkCheekbone + shrinkJawbone = shrinkJawbone + whiteTeeth = whiteTeeth + hairlineHeight = hairlineHeight + narrowNose = narrowNose + mouthSize = mouthSize + chinLength = chinLength + brightEye = brightEye + darkCircles = darkCircles + nasolabialFolds = nasolabialFolds + saturation = saturation + contrast = contrast + + makeUp = makeUp + sticker = sticker + } } + + data class MakeUpItem( + val context: Context, + val path: String, + val strength: Float + ) + + data class StickerItem( + val context: Context, + val path: String + ) } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java index 90c911f6b..0cc35fa72 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java @@ -96,7 +96,7 @@ public Integer drawYuv(byte[] yuv, int width, int height) { matrix.preTranslate(0.5f, 0.5f); matrix.preScale(1f, -1f); // I420-frames are upside down matrix.preTranslate(-0.5f, -0.5f); - glRectDrawer.drawYuv(yuvUploader.getYuvTextures(), RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix), width, height, 0, 0, width, height); + glRectDrawer.drawYuv(yuvUploader.getYuvTextures(), 0, 
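Stickers in the SenseTime config are tracked through a single package id: setting a sticker installs the package with changePackageFromAssetsFile, and clearing it calls removeEffect on the remembered id. A minimal sketch of that lifecycle around the two calls used in this hunk:

```kotlin
import android.content.Context
import com.softsugar.stmobile.STMobileEffectNative

// 0 means "no sticker installed"; ids come from changePackageFromAssetsFile.
private var stickerPackageId = 0

fun applySticker(
    effect: STMobileEffectNative,
    context: Context,
    resourcePath: String,
    path: String?
) {
    if (path == null) {
        if (stickerPackageId > 0) {
            effect.removeEffect(stickerPackageId)   // uninstall the old package
            stickerPackageId = 0
        }
    } else {
        stickerPackageId =
            effect.changePackageFromAssetsFile("$resourcePath/$path", context.assets)
    }
}
```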
RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix), width, height, 0, 0, width, height); GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); GLES20.glFlush(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/FileUtils.java b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/FileUtils.java index f15f4c2ec..7bd8cbeaf 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/FileUtils.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/FileUtils.java @@ -4,11 +4,15 @@ import android.content.res.AssetManager; import android.text.TextUtils; +import org.jetbrains.annotations.NotNull; + +import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.InputStreamReader; /** * The type File utils. @@ -114,6 +118,39 @@ public static void readInputStream(String storagePath, InputStream inputStream) } } + @NotNull + public static String getAssetsString(@NotNull Context context, @NotNull String path) { + StringBuilder sb = new StringBuilder(); + InputStreamReader isr = null; + BufferedReader br = null; + + try { + isr = new InputStreamReader(context.getResources().getAssets().open(path)); + br = new BufferedReader(isr); + String line = null; + while ((line = br.readLine()) != null){ + sb.append(line).append("\n"); + } + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + if (isr != null) { + try { + isr.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + if (br != null) { + try { + br.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return sb.toString(); + } } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt index c14e73ae2..b3022ab34 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt @@ -26,45 +26,18 @@ package io.agora.beautyapi.bytedance import android.content.Context import android.view.View -import com.bytedance.labcv.effectsdk.RenderManager +import com.effectsar.labcv.effectsdk.RenderManager import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -/** - * Version - */ -const val VERSION = "1.0.3" +const val VERSION = "1.0.6" -/** - * Capture mode - * - * @constructor Create empty Capture mode - */ enum class CaptureMode{ - /** - * Agora - * - * @constructor Create empty Agora - */ Agora, // 使用声网内部的祼数据接口进行处理 - - /** - * Custom - * - * @constructor Create empty Custom - */ Custom // 自定义模式,需要自己调用onFrame接口将原始视频帧传给BeautyAPI做处理 } -/** - * Event callback - * - * @property onBeautyStats - * @property onEffectInitialized - * @property onEffectDestroyed - * @constructor Create empty Event callback - */ data class EventCallback( /** * 统计数据回调,每处理完一帧后会回调一次 @@ -84,83 +57,27 @@ data class EventCallback( val onEffectDestroyed: (()->Unit)? 
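EventCallback keeps its hooks nullable, and onEffectDestroyed defaults to null; assuming the other hooks default to null as well, callers subscribe only to what they need. For example, logging the per-interval frame costs reported by BeautyStats:

```kotlin
import android.util.Log
import io.agora.beautyapi.bytedance.EventCallback

// Only the stats hook is provided; the init/destroy hooks stay null.
val callback = EventCallback(
    onBeautyStats = { stats ->
        Log.d("Beauty", "cost min=${stats.minCostMs} max=${stats.maxCostMs} avg=${stats.averageCostMs}")
    }
)
```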
= null ) -/** - * Beauty stats - * - * @property minCostMs - * @property maxCostMs - * @property averageCostMs - * @constructor Create empty Beauty stats - */ data class BeautyStats( val minCostMs:Long, // 统计区间内的最小值 val maxCostMs: Long, // 统计区间内的最大值 val averageCostMs: Long // 统计区间内的平均值 ) -/** - * Mirror mode - * - * @constructor Create empty Mirror mode - */ enum class MirrorMode { // 没有镜像正常画面的定义:前置拍到画面和手机看到画面是左右不一致的,后置拍到画面和手机看到画面是左右一致的 - /** - * Mirror Local Remote - * - * @constructor Create empty Mirror Local Remote - */ MIRROR_LOCAL_REMOTE, //本地远端都镜像,前置默认,本地和远端贴纸都正常 - - /** - * Mirror Local Only - * - * @constructor Create empty Mirror Local Only - */ MIRROR_LOCAL_ONLY, // 仅本地镜像,远端不镜像,,远端贴纸正常,本地贴纸镜像。用于打电话场景,电商直播场景(保证电商直播后面的告示牌文字是正的);这种模式因为本地远端是反的,所以肯定有一边的文字贴纸方向会是反的 - - /** - * Mirror Remote Only - * - * @constructor Create empty Mirror Remote Only - */ MIRROR_REMOTE_ONLY, // 仅远端镜像,本地不镜像,远端贴纸正常,本地贴纸镜像 - - /** - * Mirror None - * - * @constructor Create empty Mirror None - */ MIRROR_NONE // 本地远端都不镜像,后置默认,本地和远端贴纸都正常 } -/** - * Camera config - * - * @property frontMirror - * @property backMirror - * @constructor Create empty Camera config - */ data class CameraConfig( val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // 前置默认镜像:本地远端都镜像 val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // 后置默认镜像:本地远端都不镜像 ) -/** - * Config - * - * @property context - * @property rtcEngine - * @property renderManager - * @property eventCallback - * @property captureMode - * @property statsDuration - * @property statsEnable - * @property cameraConfig - * @constructor Create empty Config - */ data class Config( val context: Context, // Android Context上下文 val rtcEngine: RtcEngine, // 声网Rtc引擎 @@ -172,103 +89,23 @@ data class Config( val cameraConfig: CameraConfig = CameraConfig() // 摄像头镜像配置 ) -/** - * Error code - * - * @property value - * @constructor Create empty Error code - */ enum class ErrorCode(val value: Int) { - /** - * Error Ok - * - * @constructor Create empty Error Ok - */ ERROR_OK(0), // 一切正常 - - /** - * Error Has Not Initialized - * - * @constructor Create empty Error Has Not Initialized - */ ERROR_HAS_NOT_INITIALIZED(101), // 没有调用Initialize或调用失败情况下调用了其他API - - /** - * Error Has Initialized - * - * @constructor Create empty Error Has Initialized - */ ERROR_HAS_INITIALIZED(102), // 已经Initialize成功后再次调用报错 - - /** - * Error Has Released - * - * @constructor Create empty Error Has Released - */ ERROR_HAS_RELEASED(103), // 已经调用release销毁后还调用其他API - - /** - * Error Process Not Custom - * - * @constructor Create empty Error Process Not Custom - */ ERROR_PROCESS_NOT_CUSTOM(104), // 非Custom处理模式下调用onFrame接口从外部传入视频帧 - - /** - * Error Process Disable - * - * @constructor Create empty Error Process Disable - */ - ERROR_PROCESS_DISABLE(105), // 当调用enable(false)禁用美颜后调用onFrame接口返回 - - /** - * Error View Type Error - * - * @constructor Create empty Error View Type Error - */ - ERROR_VIEW_TYPE_ERROR(106), // 当调用setupLocalVideo时view类型错误时返回 - - /** - * Error Frame Skipped - * - * @constructor Create empty Error Frame Skipped - */ - ERROR_FRAME_SKIPPED(107), // 当处理帧忽略时在onFrame返回 + ERROR_VIEW_TYPE_ERROR(105), // 当调用setupLocalVideo时view类型错误时返回 + ERROR_FRAME_SKIPPED(106), // 当处理帧忽略时在onFrame返回 } -/** - * Beauty preset - * - * @constructor Create empty Beauty preset - */ enum class BeautyPreset { - /** - * Custom - * - * @constructor Create empty Custom - */ CUSTOM, // 不使用推荐的美颜参数 - - /** - * Default - * - * @constructor Create empty Default - */ DEFAULT // 默认的 } -/** - * Create byte dance beauty a p i - * - * 
@return - */ fun createByteDanceBeautyAPI(): ByteDanceBeautyAPI = ByteDanceBeautyAPIImpl() -/** - * Byte dance beauty a p i - * - * @constructor Create empty Byte dance beauty a p i - */ interface ByteDanceBeautyAPI { /** @@ -327,11 +164,6 @@ interface ByteDanceBeautyAPI { */ fun isFrontCamera(): Boolean - /** - * Get mirror applied - * - * @return - */ fun getMirrorApplied(): Boolean /** @@ -339,6 +171,13 @@ interface ByteDanceBeautyAPI { */ fun setParameters(key: String, value: String) + /** + * 在处理线程里执行操作 + * + * @param run 操作run + */ + fun runOnProcessThread(run: ()->Unit) + /** * 释放资源,一旦释放后这个实例将无法使用 * diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt index fc055b6d9..e5b35bcc2 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt @@ -28,7 +28,8 @@ import android.graphics.Matrix import android.view.SurfaceView import android.view.TextureView import android.view.View -import com.bytedance.labcv.effectsdk.BytedEffectConstants +import com.effectsar.labcv.effectsdk.EffectsSDKEffectConstants +import com.effectsar.labcv.effectsdk.RenderManager import io.agora.base.TextureBufferHelper import io.agora.base.VideoFrame import io.agora.base.VideoFrame.I420Buffer @@ -44,14 +45,10 @@ import io.agora.rtc2.gl.EglBaseProvider import io.agora.rtc2.video.IVideoFrameObserver import io.agora.rtc2.video.VideoCanvas import java.nio.ByteBuffer +import java.util.Collections import java.util.concurrent.Callable import java.util.concurrent.Executors -/** - * Byte dance beauty a p i impl - * - * @constructor Create empty Byte dance beauty a p i impl - */ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { private val TAG = "ByteDanceBeautyAPIImpl" private val reportId = "scenarioAPI" @@ -75,68 +72,33 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { private var isFrontCamera = true private var cameraConfig = CameraConfig() private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN + private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) + private var frameWidth = 0 + private var frameHeight = 0 private enum class BeautyProcessType{ - /** - * Unknown - * - * @constructor Create empty Unknown - */ - UNKNOWN, - - /** - * Texture Oes - * - * @constructor Create empty Texture Oes - */ - TEXTURE_OES, - - /** - * Texture 2d - * - * @constructor Create empty Texture 2d - */ - TEXTURE_2D, - - /** - * I420 - * - * @constructor Create empty I420 - */ - I420 + UNKNOWN, TEXTURE_OES, TEXTURE_2D, I420 } - /** - * Initialize - * - * @param config - * @return - */ override fun initialize(config: Config): Int { if (this.config != null) { LogUtils.e(TAG, "initialize >> The beauty api has been initialized!") return ErrorCode.ERROR_HAS_INITIALIZED.value } this.config = config + this.cameraConfig = config.cameraConfig if (config.captureMode == CaptureMode.Agora) { config.rtcEngine.registerVideoFrameObserver(this) } statsHelper = StatsHelper(config.statsDuration) { this.config?.eventCallback?.onBeautyStats?.invoke(it) } - LogUtils.setLogFilePath(config.context.getExternalFilesDir("")?.absolutePath ?: "") LogUtils.i(TAG, "initialize >> config = $config") - LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk 
version=${config.renderManager.sdkVersion}") + LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${RenderManager.getSDKVersion()}") config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "$config", 0) return ErrorCode.ERROR_OK.value } - /** - * Enable - * - * @param enable - * @return - */ override fun enable(enable: Boolean): Int { LogUtils.i(TAG, "enable >> enable = $enable") if (config == null) { @@ -156,13 +118,6 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Setup local video - * - * @param view - * @param renderMode - * @return - */ override fun setupLocalVideo(view: View, renderMode: Int): Int { val rtcEngine = config?.rtcEngine if(rtcEngine == null){ @@ -180,12 +135,6 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_VIEW_TYPE_ERROR.value } - /** - * On frame - * - * @param videoFrame - * @return - */ override fun onFrame(videoFrame: VideoFrame): Int { val conf = config if (conf == null) { @@ -200,9 +149,6 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!") return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value } - if (!enable) { - return ErrorCode.ERROR_PROCESS_DISABLE.value - } if (processBeauty(videoFrame)) { return ErrorCode.ERROR_OK.value } @@ -210,15 +156,6 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_FRAME_SKIPPED.value } - /** - * Set beauty preset - * - * @param preset - * @param beautyNodePath - * @param beauty4ItemNodePath - * @param reSharpNodePath - * @return - */ override fun setBeautyPreset( preset: BeautyPreset, beautyNodePath: String, @@ -234,91 +171,105 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!") return ErrorCode.ERROR_HAS_RELEASED.value } + val initialized = textureBufferHelper != null + if(!initialized){ + runOnProcessThread { + setBeautyPreset(preset, beautyNodePath, beauty4ItemNodePath, reSharpNodePath) + } + return ErrorCode.ERROR_OK.value + } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset, beautyNodePath=$beautyNodePath, beauty4ItemNodePath=$beauty4ItemNodePath, reSharpNodePath=$reSharpNodePath", 0) - val renderManager = - config?.renderManager ?: return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value - val enable = preset == BeautyPreset.DEFAULT - - renderManager.updateComposerNodes( - beautyNodePath, - "smooth", - if (enable) 0.3f else 0f - )// 磨皮 - renderManager.updateComposerNodes( - beautyNodePath, - "whiten", - if (enable) 0.5f else 0f - )// 美白 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Overall", - if (enable) 0.15f else 0f - )//瘦脸 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Zoom_Cheekbone", - if (enable) 0.3f else 0f - )//瘦颧骨 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Zoom_Jawbone", - if (enable) 0.46f else 0f - )//下颌骨 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Eye", - if (enable) 0.15f else 0f - )//大眼 - renderManager.updateComposerNodes( - beauty4ItemNodePath, - "BEF_BEAUTY_WHITEN_TEETH", - if (enable) 0.2f else 0f - )//美牙 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Forehead", - if (enable) 0.4f 
else 0f - )//额头 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Nose", - if (enable) 0.15f else 0f - )//瘦鼻 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_ZoomMouth", - if (enable) 0.16f else 0f - )//嘴形 - renderManager.updateComposerNodes( - reSharpNodePath, - "Internal_Deform_Chin", - if (enable) 0.46f else 0f - )//下巴 - + runOnProcessThread { + val renderManager = + config?.renderManager ?: return@runOnProcessThread + + val enable = preset == BeautyPreset.DEFAULT + renderManager.updateComposerNodes( + beautyNodePath, + "smooth", + if (enable) 0.3f else 0f + )// 磨皮 + renderManager.updateComposerNodes( + beautyNodePath, + "whiten", + if (enable) 0.5f else 0f + )// 美白 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Overall", + if (enable) 0.15f else 0f + )//瘦脸 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Zoom_Cheekbone", + if (enable) 0.3f else 0f + )//瘦颧骨 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Zoom_Jawbone", + if (enable) 0.46f else 0f + )//下颌骨 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Eye", + if (enable) 0.15f else 0f + )//大眼 + renderManager.updateComposerNodes( + beauty4ItemNodePath, + "BEF_BEAUTY_WHITEN_TEETH", + if (enable) 0.2f else 0f + )//美牙 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Forehead", + if (enable) 0.4f else 0f + )//额头 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Nose", + if (enable) 0.15f else 0f + )//瘦鼻 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_ZoomMouth", + if (enable) 0.16f else 0f + )//嘴形 + renderManager.updateComposerNodes( + reSharpNodePath, + "Internal_Deform_Chin", + if (enable) 0.46f else 0f + )//下巴 + } return ErrorCode.ERROR_OK.value } - /** - * Set parameters - * - * @param key - * @param value - */ override fun setParameters(key: String, value: String) { when (key) { "beauty_mode" -> beautyMode = value.toInt() } } - /** - * Update camera config - * - * @param config - * @return - */ + override fun runOnProcessThread(run: () -> Unit) { + if (config == null) { + LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!") + return + } + if (isReleased) { + LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!") + return + } + if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { + run.invoke() + } else if (textureBufferHelper != null) { + textureBufferHelper?.handler?.post(run) + } else { + pendingProcessRunList.add(run) + } + } + override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) @@ -327,17 +278,8 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Is front camera - * - */ override fun isFrontCamera() = isFrontCamera - /** - * Release - * - * @return - */ override fun release(): Int { val conf = config if(conf == null){ @@ -348,12 +290,16 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!") return ErrorCode.ERROR_HAS_RELEASED.value } + if (conf.captureMode == CaptureMode.Agora) { + conf.rtcEngine.registerVideoFrameObserver(null) + } conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, 
"release", "", 0) LogUtils.i(TAG, "release") isReleased = true workerThreadExecutor.shutdown() textureBufferHelper?.let { textureBufferHelper = null + it.handler.removeCallbacksAndMessages(null) it.invoke { imageUtils?.release() agoraImageHelper?.release() @@ -366,6 +312,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } statsHelper?.reset() statsHelper = null + pendingProcessRunList.clear() return ErrorCode.ERROR_OK.value } @@ -428,6 +375,17 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return false } + val oldFrameWidth = frameWidth + val oldFrameHeight = frameHeight + frameWidth = videoFrame.rotatedWidth + frameHeight = videoFrame.rotatedHeight + if (oldFrameWidth > 0 || oldFrameHeight > 0) { + if(oldFrameWidth != frameWidth || oldFrameHeight != frameHeight){ + skipFrame = 2 + return false + } + } + if(!enable){ return true } @@ -436,15 +394,22 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { textureBufferHelper = TextureBufferHelper.create( "ByteDanceRender", EglBaseProvider.instance().rootEglBase.eglBaseContext - )?.apply { - invoke { - imageUtils = ImageUtil() - agoraImageHelper = AgoraImageHelper() - config?.eventCallback?.onEffectInitialized?.invoke() + ) + textureBufferHelper?.invoke { + imageUtils = ImageUtil() + agoraImageHelper = AgoraImageHelper() + config?.eventCallback?.onEffectInitialized?.invoke() + synchronized(pendingProcessRunList){ + val iterator = pendingProcessRunList.iterator() + while (iterator.hasNext()){ + iterator.next().invoke() + iterator.remove() + } } } LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode") } + val startTime = System.currentTimeMillis() val processTexId = when (beautyMode) { @@ -522,6 +487,9 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { mirror = !mirror } + val width = videoFrame.rotatedWidth + val height = videoFrame.rotatedHeight + val renderMatrix = Matrix() renderMatrix.preTranslate(0.5f, 0.5f) renderMatrix.preRotate(videoFrame.rotation.toFloat()) @@ -533,8 +501,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { val transform = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix) - val width = buffer.height - val height = buffer.width + val dstTexture = imageUtils.prepareTexture(width, height) val srcTexture = agoraImageHelper.transformTexture( buffer.textureId, @@ -549,7 +516,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { dstTexture, width, height, - BytedEffectConstants.Rotation.CLOCKWISE_ROTATE_90, + EffectsSDKEffectConstants.Rotation.CLOCKWISE_ROTATE_0, videoFrame.timestampNs ) if (!success) { @@ -563,7 +530,6 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { val texBufferHelper = textureBufferHelper ?: return -1 val imageUtils = imageUtils ?: return -1 val nv21Buffer = getNV21Buffer(videoFrame) ?: return -1 - val buffer = videoFrame.buffer val isFront = videoFrame.sourceType == VideoFrame.SourceType.kFrontCamera if (currBeautyProcessType != BeautyProcessType.I420) { @@ -575,8 +541,8 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return texBufferHelper.invoke(Callable { val renderManager = config?.renderManager ?: return@Callable -1 - val width = buffer.height - val height = buffer.width + val width = videoFrame.rotatedWidth + val height = videoFrame.rotatedHeight val ySize = width * height val yBuffer = ByteBuffer.allocateDirect(ySize) @@ -590,16 +556,19 @@ class 
ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { if((isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)){ mirror = !mirror } - + val isScreenLandscape = videoFrame.rotation % 180 == 0 val dstTexture = imageUtils.prepareTexture(width, height) val srcTexture = imageUtils.transferYUVToTexture( yBuffer, vuBuffer, - height, - width, + if (isScreenLandscape) width else height, + if (isScreenLandscape) height else width, ImageUtil.Transition().apply { rotate(videoFrame.rotation.toFloat()) - flip(false, mirror) + flip( + if (isScreenLandscape) mirror else false, + if (isScreenLandscape) false else mirror + ) } ) renderManager.setCameraPostion(isFront) @@ -608,7 +577,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { dstTexture, width, height, - BytedEffectConstants.Rotation.CLOCKWISE_ROTATE_0, + EffectsSDKEffectConstants.Rotation.CLOCKWISE_ROTATE_0, videoFrame.timestampNs ) return@Callable if (success) { @@ -648,75 +617,29 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { // IVideoFrameObserver implements - /** - * On capture video frame - * - * @param sourceType - * @param videoFrame - * @return - */ override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean { videoFrame ?: return false return processBeauty(videoFrame) } - /** - * On pre encode video frame - * - * @param sourceType - * @param videoFrame - */ override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?) = false - /** - * On media player video frame - * - * @param videoFrame - * @param mediaPlayerId - */ override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int) = false - /** - * On render video frame - * - * @param channelId - * @param uid - * @param videoFrame - */ override fun onRenderVideoFrame( channelId: String?, uid: Int, videoFrame: VideoFrame? ) = false - /** - * Get video frame process mode - * - */ override fun getVideoFrameProcessMode() = IVideoFrameObserver.PROCESS_MODE_READ_WRITE - /** - * Get video format preference - * - */ override fun getVideoFormatPreference() = IVideoFrameObserver.VIDEO_PIXEL_DEFAULT - /** - * Get rotation applied - * - */ override fun getRotationApplied() = false - /** - * Get mirror applied - * - */ override fun getMirrorApplied() = captureMirror && !enable - /** - * Get observed frame position - * - */ override fun getObservedFramePosition() = IVideoFrameObserver.POSITION_POST_CAPTURER } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt index 609139091..597e84c9b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt @@ -30,25 +30,10 @@ import io.agora.base.internal.video.GlRectDrawer import io.agora.base.internal.video.GlTextureFrameBuffer import io.agora.base.internal.video.RendererCommon.GlDrawer -/** - * Agora image helper - * - * @constructor Create empty Agora image helper - */ class AgoraImageHelper { private var glFrameBuffer: GlTextureFrameBuffer? = null private var drawer : GlDrawer? 
= null - /** - * Transform texture - * - * @param texId - * @param texType - * @param width - * @param height - * @param transform - * @return - */ fun transformTexture( texId: Int, texType: VideoFrame.TextureBuffer.Type, @@ -71,9 +56,9 @@ class AgoraImageHelper { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.frameBufferId) if(texType == VideoFrame.TextureBuffer.Type.OES){ - drawer.drawOes(texId, transform, width, height, 0, 0, width, height) + drawer.drawOes(texId, 0, transform, width, height, 0, 0, width, height) }else{ - drawer.drawRgb(texId, transform, width, height, 0, 0, width, height) + drawer.drawRgb(texId, 0, transform, width, height, 0, 0, width, height) } GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0) GLES20.glFinish() @@ -81,10 +66,6 @@ class AgoraImageHelper { return frameBuffer.textureId } - /** - * Release - * - */ fun release() { glFrameBuffer?.release() glFrameBuffer = null diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/GLTestUtils.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/GLTestUtils.java index 70cb19208..3d17f9f74 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/GLTestUtils.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/GLTestUtils.java @@ -37,23 +37,9 @@ import java.nio.ByteBuffer; import java.nio.IntBuffer; -/** - * The type Gl test utils. - */ -public final class GLTestUtils { +public class GLTestUtils { private static final String TAG = "GLUtils"; - private GLTestUtils() { - } - - /** - * Gets texture 2 d image. - * - * @param textureID the texture id - * @param width the width - * @param height the height - * @return the texture 2 d image - */ public static Bitmap getTexture2DImage(int textureID, int width, int height) { try { int[] oldFboId = new int[1]; @@ -95,14 +81,6 @@ public static Bitmap getTexture2DImage(int textureID, int width, int height) { return null; } - /** - * Gets texture oes image. - * - * @param textureID the texture id - * @param width the width - * @param height the height - * @return the texture oes image - */ public static Bitmap getTextureOESImage(int textureID, int width, int height) { try { int[] oldFboId = new int[1]; @@ -144,14 +122,6 @@ public static Bitmap getTextureOESImage(int textureID, int width, int height) { return null; } - /** - * Nv 21 to bitmap bitmap. 
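Taken together, the BeautyAPI changes above (queued setBeautyPreset, the new runOnProcessThread hook, observer unregistration on release) imply a calling pattern like the following minimal Kotlin sketch. It is illustrative only, not part of the patch, and assumes the host app already owns context, rtcEngine, renderManager and the effect node path strings:

    val beautyAPI = createByteDanceBeautyAPI()
    beautyAPI.initialize(Config(context, rtcEngine, renderManager, captureMode = CaptureMode.Agora))
    // May now be called before the processing thread exists: the call is queued in
    // pendingProcessRunList and replayed once the TextureBufferHelper is created.
    beautyAPI.setBeautyPreset(BeautyPreset.DEFAULT, beautyNodePath, beauty4ItemNodePath, reSharpNodePath)
    beautyAPI.runOnProcessThread {
        // Runs on the effect processing thread; a safe place for direct RenderManager calls.
    }
    beautyAPI.release() // now also unregisters the video frame observer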
- * - * @param nv21 the nv 21 - * @param width the width - * @param height the height - * @return the bitmap - */ public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) { Bitmap bitmap = null; try { @@ -166,7 +136,7 @@ public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) { return bitmap; } - private static Bitmap readBitmap(int width, int height) { + private static Bitmap readBitmap(int width, int height){ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4); rgbaBuf.position(0); GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/ImageUtil.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/ImageUtil.java index c324956da..9289d145f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/ImageUtil.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/ImageUtil.java @@ -30,10 +30,9 @@ import android.graphics.Point; import android.opengl.GLES20; import android.opengl.Matrix; -import android.util.Log; import android.widget.ImageView; -import com.bytedance.labcv.effectsdk.BytedEffectConstants; +import com.effectsar.labcv.effectsdk.EffectsSDKEffectConstants; import java.nio.ByteBuffer; @@ -47,69 +46,73 @@ public class ImageUtil { private static final String TAG = "ImageUtil"; - /** - * The M frame buffers. - */ protected int[] mFrameBuffers; - /** - * The M frame buffer textures. - */ protected int[] mFrameBufferTextures; - /** - * The Frame buffer num. - */ - protected int frameBufferNum = 1; - /** - * The M frame buffer shape. - */ + protected int FRAME_BUFFER_NUM = 1; protected Point mFrameBufferShape; private ProgramManager mProgramManager; - /** - * {en} - * Default constructor - *
- * {zh} + + + /** {zh} * 默认构造函数 */ + /** {en} + * Default constructor + */ + public ImageUtil() { } - /** - * {zh} + /** {zh} * 准备帧缓冲区纹理对象 * * @param width 纹理宽度 * @param height 纹理高度 - * @return 纹理ID int
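As a usage sketch (not part of the patch) of how prepareTexture pairs with the YUV upload in the Impl above — assuming width, height and the NV21 planes come from the current frame, and that the code runs on the GL processing thread:

    val dstTexture = imageUtils.prepareTexture(width, height) // FBO-backed destination texture
    val srcTexture = imageUtils.transferYUVToTexture(
        yBuffer, vuBuffer, width, height,
        ImageUtil.Transition().rotate(rotation.toFloat())
    )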
- * {en} Prepare frame buffer texture object + * @return 纹理ID + */ + /** {en} + * Prepare frame buffer texture object + * + * @param width texture width + * @param height texture height + * @return texture ID + */ + public int prepareTexture(int width, int height) { initFrameBufferIfNeed(width, height); return mFrameBufferTextures[0]; } - /** - * {zh} + /** {zh} * 默认的离屏渲染绑定的纹理 - * - * @return 纹理id output texture
{en} Default off-screen rendering bound texture + * @return 纹理id */ + /** {en} + * Default off-screen rendering bound texture + * @return texture id + */ + public int getOutputTexture() { - if (mFrameBufferTextures == null) { - return GlUtil.NO_TEXTURE; - } + if (mFrameBufferTextures == null) return GlUtil.NO_TEXTURE; return mFrameBufferTextures[0]; } - /** - * {zh} + /** {zh} * 初始化帧缓冲区 * * @param width 缓冲的纹理宽度 * @param height 缓冲的纹理高度 */ + /** {en} + * Initialize frame buffer + * + * @param width buffered texture width + * @param height buffered texture height + */ + private void initFrameBufferIfNeed(int width, int height) { boolean need = false; if (null == mFrameBufferShape || mFrameBufferShape.x != width || mFrameBufferShape.y != height) { @@ -120,11 +123,11 @@ private void initFrameBufferIfNeed(int width, int height) { } if (need) { destroyFrameBuffers(); - mFrameBuffers = new int[frameBufferNum]; - mFrameBufferTextures = new int[frameBufferNum]; - GLES20.glGenFramebuffers(frameBufferNum, mFrameBuffers, 0); - GLES20.glGenTextures(frameBufferNum, mFrameBufferTextures, 0); - for (int i = 0; i < frameBufferNum; i++) { + mFrameBuffers = new int[FRAME_BUFFER_NUM]; + mFrameBufferTextures = new int[FRAME_BUFFER_NUM]; + GLES20.glGenFramebuffers(FRAME_BUFFER_NUM, mFrameBuffers, 0); + GLES20.glGenTextures(FRAME_BUFFER_NUM, mFrameBufferTextures, 0); + for (int i = 0; i < FRAME_BUFFER_NUM; i++) { bindFrameBuffer(mFrameBufferTextures[i], mFrameBuffers[i], width, height); } mFrameBufferShape = new Point(width, height); @@ -132,21 +135,35 @@ private void initFrameBufferIfNeed(int width, int height) { } - /** - * {zh} + /** {zh} * 销毁帧缓冲区对象 */ + /** {en} + * Destroy frame buffer objects + */ + private void destroyFrameBuffers() { if (mFrameBufferTextures != null) { - GLES20.glDeleteTextures(frameBufferNum, mFrameBufferTextures, 0); + GLES20.glDeleteTextures(FRAME_BUFFER_NUM, mFrameBufferTextures, 0); mFrameBufferTextures = null; } if (mFrameBuffers != null) { - GLES20.glDeleteFramebuffers(frameBufferNum, mFrameBuffers, 0); + GLES20.glDeleteFramebuffers(FRAME_BUFFER_NUM, mFrameBuffers, 0); mFrameBuffers = null; } } + /** {zh} + * 纹理参数设置+buffer绑定 + * set texture params + * and bind buffer + */ + /** {en} + * Texture parameter setting + buffer binding + * set texture params + * and binding buffer + */ + private void bindFrameBuffer(int textureId, int frameBuffer, int width, int height) { GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, @@ -169,15 +186,14 @@ private void bindFrameBuffer(int textureId, int frameBuffer, int width, int heig } - /** - * {zh} + + /** {zh} * 释放资源,包括帧缓冲区及Program对象 - *
- * {en} + */ + /** {en} * Free resources, including frame buffers and Program objects - * {zh} - * 释放资源,包括帧缓冲区及Program对象 */ + public void release() { destroyFrameBuffers(); if (null != mProgramManager) { @@ -189,18 +205,23 @@ public void release() { } } - /** - * {zh} + /** {zh} * 读取渲染结果的buffer * * @param imageWidth 图像宽度 * @param imageHeight 图像高度 - * @return 渲染结果的像素Buffer 格式RGBA
{en} Read the buffer + * @return 渲染结果的像素Buffer 格式RGBA */ + /** {en} + * Read the buffer + * + * @param imageWidth image width + * @param imageHeight image height + * @return pixel Buffer format of the rendered result RGBA + */ + public ByteBuffer captureRenderResult(int imageWidth, int imageHeight) { - if (mFrameBufferTextures == null) { - return null; - } + if (mFrameBufferTextures == null) return null; int textureId = mFrameBufferTextures[0]; if (null == mFrameBufferTextures || textureId == GlUtil.NO_TEXTURE) { return null; @@ -237,15 +258,21 @@ public ByteBuffer captureRenderResult(int imageWidth, int imageHeight) { return mCaptureBuffer; } - /** - * {zh} + /** {zh} * 读取渲染结果的buffer * - * @param textureId the texture id * @param imageWidth 图像宽度 * @param imageHeight 图像高度 - * @return 渲染结果的像素Buffer 格式RGBA
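A hedged sketch of the read-back path documented here, e.g. for debug snapshots; imageUtil is assumed to be an ImageUtil instance and the calls must happen on the GL thread:

    val rgba = imageUtil.captureRenderResult(width, height) ?: return
    val bitmap = imageUtil.transferBufferToBitmap(
        rgba, EffectsSDKEffectConstants.PixlFormat.RGBA8888, width, height
    )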
{en} Read the buffer + * @return 渲染结果的像素Buffer 格式RGBA + */ + /** {en} + * Read the buffer + * + * @param imageWidth image width + * @param imageHeight image height + * @return pixel Buffer format of the rendered result RGBA */ + public ByteBuffer captureRenderResult(int textureId, int imageWidth, int imageHeight) { if (textureId == GlUtil.NO_TEXTURE) { return null; @@ -282,16 +309,25 @@ public ByteBuffer captureRenderResult(int textureId, int imageWidth, int imageHe return mCaptureBuffer; } - /** - * {zh} + /** {zh} * 纹理拷贝 * - * @param srcTexture the src texture - * @param dstTexture the dst texture - * @param width the width - * @param height the height - * @return boolean
{en} Texture copy + * @param srcTexture + * @param dstTexture + * @param width + * @param height + * @return */ + /** {en} + * Texture copy + * + * @param srcTexture + * @param dstTexture + * @param width + * @param height + * @return + */ + public boolean copyTexture(int srcTexture, int dstTexture, int width, int height) { if (srcTexture == GlUtil.NO_TEXTURE || dstTexture == GlUtil.NO_TEXTURE) { return false; @@ -315,133 +351,141 @@ public boolean copyTexture(int srcTexture, int dstTexture, int width, int height int error = GLES20.glGetError(); if (error != GLES20.GL_NO_ERROR) { String msg = "copyTexture glError 0x" + Integer.toHexString(error); - Log.e(TAG, msg); return false; } return true; + + } - /** - * {zh} - * + /** {zh} * @param inputTexture 输入纹理 * @param inputTextureFormat 输入纹理格式,2D/OES * @param outputTextureFormat 输出纹理格式,2D/OES * @param width 输入纹理的宽 * @param height 输入纹理的高 * @param transition 纹理变换方式 - * @return 输出纹理 int + * @return 输出纹理 + * @brief 纹理转纹理 */ - public int transferTextureToTexture(int inputTexture, BytedEffectConstants.TextureFormat inputTextureFormat, - BytedEffectConstants.TextureFormat outputTextureFormat, - int width, int height, Transition transition) { - if (outputTextureFormat != BytedEffectConstants.TextureFormat.Texure2D) { - LogUtils.e(TAG, "the inputTexture is not supported,please use Texure2D as output texture format"); - return GlUtil.NO_TEXTURE; - } + /** {en} + * @param inputTextureFormat input texture format, 2D/OES + * @param outputTextureFormat output texture format, 2D/OES + * @param width input texture width + * @param height input texture height + * @param transition texture transformation mode + * @return output texture + * @brief texture to texture + */ + + public int transferTextureToTexture(int inputTexture, EffectsSDKEffectConstants.TextureFormat inputTextureFormat, + EffectsSDKEffectConstants.TextureFormat outputTextureFormat, + int width, int height, Transition transition) { + if (outputTextureFormat != EffectsSDKEffectConstants.TextureFormat.Texure2D){ + LogUtils.e(TAG, "the inputTexture is not supported,please use Texure2D as output texture format"); + return GlUtil.NO_TEXTURE; + } if (null == mProgramManager) { mProgramManager = new ProgramManager(); } - boolean targetRoated = transition.getAngle() % 180 == 90; - return mProgramManager.getProgram(inputTextureFormat).drawFrameOffScreen(inputTexture, targetRoated ? height : width, targetRoated ? width : height, transition.getMatrix()); + boolean targetRoated = (transition.getAngle()%180 ==90); + return mProgramManager.getProgram(inputTextureFormat).drawFrameOffScreen(inputTexture, targetRoated?height:width, targetRoated?width:height, transition.getMatrix()); } private ProgramTextureYUV mYUVProgram; - - /** - * Transfer yuv to texture int. 
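For reference, the two texture paths above might be combined as follows. Illustrative only: inTexture and dstTexture are assumed to be existing GL textures on the current context, and only Texure2D (the SDK's own spelling) is accepted as the output format:

    val outTexture = imageUtil.transferTextureToTexture(
        inTexture,
        EffectsSDKEffectConstants.TextureFormat.Texure2D,
        EffectsSDKEffectConstants.TextureFormat.Texure2D,
        width, height,
        ImageUtil.Transition().rotate(90f) // 90/270 swaps width and height internally
    )
    if (!imageUtil.copyTexture(outTexture, dstTexture, width, height)) {
        // copyTexture now fails silently (the Log.e call was dropped); check the result.
    }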
- * - * @param yBuffer the y buffer - * @param vuBuffer the vu buffer - * @param width the width - * @param height the height - * @param transition the transition - * @return the int - */ public int transferYUVToTexture(ByteBuffer yBuffer, ByteBuffer vuBuffer, int width, int height, Transition transition) { if (mYUVProgram == null) { mYUVProgram = new ProgramTextureYUV(); } int yTexture = GlUtil.createImageTexture(yBuffer, width, height, GLES20.GL_ALPHA); - int vuTexture = GlUtil.createImageTexture(vuBuffer, width / 2, height / 2, GLES20.GL_LUMINANCE_ALPHA); + int vuTexture = GlUtil.createImageTexture(vuBuffer, width/2, height/2, GLES20.GL_LUMINANCE_ALPHA); int rgbaTexture = mYUVProgram.drawFrameOffScreen(yTexture, vuTexture, width, height, transition.getMatrix()); GlUtil.deleteTextureId(new int[]{yTexture, vuTexture}); return rgbaTexture; } - /** - * {zh} - * - * @param texture 纹理 + /** {zh} + * @param texture 纹理 * @param inputTextureFormat 纹理格式,2D/OES - * @param outputFormat 输出 buffer 格式 - * @param width 宽 - * @param height 高 - * @param ratio the ratio + * @param outputFormat 输出 buffer 格式 + * @param width 宽 + * @param height 高 * @return 输出 buffer + * @brief 纹理转 buffer */ - public ByteBuffer transferTextureToBuffer(int texture, BytedEffectConstants.TextureFormat inputTextureFormat, - BytedEffectConstants.PixlFormat outputFormat, int width, int height, float ratio) { - if (outputFormat != BytedEffectConstants.PixlFormat.RGBA8888) { + /** {en} + * @param inputTextureFormat texture format, 2D/OES + * @param outputFormat output buffer format + * @param width width + * @param height height + * @return output buffer + * @brief texture turn buffer + */ + + public ByteBuffer transferTextureToBuffer(int texture, EffectsSDKEffectConstants.TextureFormat inputTextureFormat, + EffectsSDKEffectConstants.PixlFormat outputFormat, int width, int height, float ratio){ + if (outputFormat != EffectsSDKEffectConstants.PixlFormat.RGBA8888){ LogUtils.e(TAG, "the outputFormat is not supported,please use RGBA8888 as output texture format"); - return null; + return null; } if (null == mProgramManager) { mProgramManager = new ProgramManager(); } - return mProgramManager.getProgram(inputTextureFormat).readBuffer(texture, (int) (width * ratio), (int) (height * ratio)); + return mProgramManager.getProgram(inputTextureFormat).readBuffer(texture, (int) (width*ratio), (int)(height*ratio)); + + + } - /** - * Transfer texture to bitmap bitmap. 
- * - * @param texture the texture - * @param inputTextureFormat the input texture format - * @param width the width - * @param height the height - * @return the bitmap - */ - public Bitmap transferTextureToBitmap(int texture, BytedEffectConstants.TextureFormat inputTextureFormat, + public Bitmap transferTextureToBitmap(int texture, EffectsSDKEffectConstants.TextureFormat inputTextureFormat, int width, int height) { - ByteBuffer buffer = transferTextureToBuffer(texture, inputTextureFormat, BytedEffectConstants.PixlFormat.RGBA8888, + ByteBuffer buffer = transferTextureToBuffer(texture, inputTextureFormat, EffectsSDKEffectConstants.PixlFormat.RGBA8888, width, height, 1); if (buffer == null) { return null; } - return transferBufferToBitmap(buffer, BytedEffectConstants.PixlFormat.RGBA8888, width, height); + return transferBufferToBitmap(buffer, EffectsSDKEffectConstants.PixlFormat.RGBA8888, width, height); } - /** - * {zh} - * + /** {zh} * @param buffer 输入 buffer * @param inputFormat buffer 格式 * @param outputFormat 输出纹理格式 * @param width 宽 * @param height 高 - * @return 输出纹理 int + * @return 输出纹理 + * @brief buffer 转纹理 + */ + /** {en} + * @param inputFormat buffer format + * @param outputFormat output texture format + * @param width width + * @param height height + * @return output texture + * @brief buffer turn texture */ - public int transferBufferToTexture(ByteBuffer buffer, BytedEffectConstants.PixlFormat inputFormat, - BytedEffectConstants.TextureFormat outputFormat, int width, int height) { - if (inputFormat != BytedEffectConstants.PixlFormat.RGBA8888) { + public int transferBufferToTexture(ByteBuffer buffer, EffectsSDKEffectConstants.PixlFormat inputFormat, + EffectsSDKEffectConstants.TextureFormat outputFormat, int width, int height){ + + if (inputFormat != EffectsSDKEffectConstants.PixlFormat.RGBA8888){ LogUtils.e(TAG, "inputFormat support RGBA8888 only"); return GlUtil.NO_TEXTURE; } - if (outputFormat != BytedEffectConstants.TextureFormat.Texure2D) { + if (outputFormat != EffectsSDKEffectConstants.TextureFormat.Texure2D){ LogUtils.e(TAG, "outputFormat support Texure2D only"); return GlUtil.NO_TEXTURE; } - return create2DTexture(buffer, width, height, GL_RGBA); + return create2DTexture(buffer, width,height, GL_RGBA); } @@ -477,33 +521,48 @@ private int create2DTexture(ByteBuffer data, int width, int height, int format) return textureHandle; } - /** - * {zh} - * + /** {zh} * @param buffer 输入 buffer * @param inputFormat 输入 buffer 格式 * @param outputFormat 输出 buffer 格式 * @param width 宽 * @param height 高 * @return 输出 buffer + * @brief buffer 转 buffer + */ + /** {en} + * @param inputFormat input buffer format + * @param outputFormat output buffer format + * @param width width + * @param height height + * @return output buffer + * @brief buffer to buffer */ - public ByteBuffer transferBufferToBuffer(ByteBuffer buffer, BytedEffectConstants.PixlFormat inputFormat, - BytedEffectConstants.PixlFormat outputFormat, int width, int height) { + + public ByteBuffer transferBufferToBuffer(ByteBuffer buffer, EffectsSDKEffectConstants.PixlFormat inputFormat, + EffectsSDKEffectConstants.PixlFormat outputFormat, int width, int height){ return null; } - /** - * {zh} - * + /** {zh} * @param buffer 输入 buffer * @param format 输入 buffer 格式 * @param width 宽 * @param height 高 * @return 输出 bitmap + * @brief buffer 转 bitmap + */ + /** {en} + * @param format input buffer format + * @param width width + * @param height height + * @return output bitmap + * @brief buffer turn bitmap */ - public Bitmap 
transferBufferToBitmap(ByteBuffer buffer, BytedEffectConstants.PixlFormat format, - int width, int height) { + + public Bitmap transferBufferToBitmap(ByteBuffer buffer, EffectsSDKEffectConstants.PixlFormat format, + int width, int height){ Bitmap mCameraBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); buffer.position(0); @@ -513,17 +572,24 @@ public Bitmap transferBufferToBitmap(ByteBuffer buffer, BytedEffectConstants.Pix } - /** - * {zh} + /** {zh} * 在屏幕上渲染纹理 - * - * @param textureId 纹理ID + * @param textureId 纹理ID * @param srcTetxureFormat 纹理格式 - * @param surfaceWidth 视口宽度 - * @param surfaceHeight 视口高度 - * @param mMVPMatrix 旋转矩阵
{en} Render texture on screen + * @param surfaceWidth 视口宽度 + * @param surfaceHeight 视口高度 + * @param mMVPMatrix 旋转矩阵 + */ + /** {en} + * Render texture on screen + * @param textureId texture ID + * @param srcTetxureFormat texture format + * @param surfaceWidth viewport width + * @param surfaceHeight viewport height + * @param mMVPMatrix rotation matrix */ - public void drawFrameOnScreen(int textureId, BytedEffectConstants.TextureFormat srcTetxureFormat, int surfaceWidth, int surfaceHeight, float[] mMVPMatrix) { + + public void drawFrameOnScreen(int textureId,EffectsSDKEffectConstants.TextureFormat srcTetxureFormat,int surfaceWidth, int surfaceHeight, float[]mMVPMatrix) { if (null == mProgramManager) { mProgramManager = new ProgramManager(); } @@ -533,26 +599,22 @@ public void drawFrameOnScreen(int textureId, BytedEffectConstants.TextureFormat } - /** - * The type Transition. + /** {zh} + * @brief 变换方式类 + */ + /** {en} + * @brief Transform mode class */ + public static class Transition { private float[] mMVPMatrix = new float[16]; private int mAngle = 0; - /** - * Instantiates a new Transition. - */ public Transition() { Matrix.setIdentityM(mMVPMatrix, 0); } - /** - * Instantiates a new Transition. - * - * @param transformMatrixArray the transform matrix array - */ public Transition(float[] transformMatrixArray) { for (int i = 0; i < transformMatrixArray.length; i++) { mMVPMatrix[i] = transformMatrixArray[i]; @@ -560,34 +622,31 @@ public Transition(float[] transformMatrixArray) { } - /** - * {zh} - * - * @param x the x - * @param y the y - * @return the transition + /** {zh} + * @brief 镜像 */ + /** {en} + * @brief Mirror image + */ + public Transition flip(boolean x, boolean y) { GlUtil.flip(mMVPMatrix, x, y); return this; } - /** - * Gets angle. - * - * @return the angle - */ public int getAngle() { - return mAngle % 360; + return mAngle%360; } - /** - * {zh} - * + /** {zh} * @param angle 旋转角度,仅支持 0/90/180/270 - * @return the transition + * @brief 旋转 */ + /** {en} + * @brief rotation + */ + public Transition rotate(float angle) { mAngle += angle; GlUtil.rotate(mMVPMatrix, angle); @@ -595,44 +654,34 @@ public Transition rotate(float angle) { } - /** - * Scale transition. - * - * @param sx the sx - * @param sy the sy - * @return the transition - */ - public Transition scale(float sx, float sy) { - GlUtil.scale(mMVPMatrix, sx, sy); + public Transition scale(float sx,float sy) { + GlUtil.scale(mMVPMatrix, sx , sy); return this; } - /** - * Crop transition. 
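The on-screen path builds its MVP matrix from a Transition; a small sketch (not part of the patch), assuming rotation and the texture/surface sizes are known to the caller:

    val mvp = ImageUtil.Transition()
        .crop(ImageView.ScaleType.CENTER_CROP, rotation, texWidth, texHeight, surfaceWidth, surfaceHeight)
        .matrix
    imageUtil.drawFrameOnScreen(
        textureId, EffectsSDKEffectConstants.TextureFormat.Texure2D,
        surfaceWidth, surfaceHeight, mvp
    )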
- * - * @param scaleType the scale type - * @param rotation the rotation - * @param textureWidth the texture width - * @param textureHeight the texture height - * @param surfaceWidth the surface width - * @param surfaceHeight the surface height - * @return the transition - */ - public Transition crop(ImageView.ScaleType scaleType, int rotation, int textureWidth, int textureHeight, int surfaceWidth, int surfaceHeight) { - if (rotation % 180 == 90) { - GlUtil.getShowMatrix(mMVPMatrix, scaleType, textureHeight, textureWidth, surfaceWidth, surfaceHeight); - } else { - GlUtil.getShowMatrix(mMVPMatrix, scaleType, textureWidth, textureHeight, surfaceWidth, surfaceHeight); + public Transition crop(ImageView.ScaleType scaleType, int rotation, int textureWidth, int textureHeight, int surfaceWidth, int surfaceHeight){ + if (rotation % 180 == 90){ + GlUtil.getShowMatrix(mMVPMatrix,scaleType, textureHeight, textureWidth, surfaceWidth, surfaceHeight); + }else { + GlUtil.getShowMatrix(mMVPMatrix,scaleType, textureWidth, textureHeight, surfaceWidth, surfaceHeight); } return this; } - /** - * {zh} - * + /** {zh} * @return 逆向后的 transition + * @brief 逆向生成新的 transition + * @details 变换操作有顺序之分,本方法可以将一系列操作逆序, + * 如将先镜像再旋转,逆序为先旋转再镜像 + */ + /** {en} + * @return Reverse transition + * @brief Reverse generation of new transition + * @details transformation operations can be divided into sequence. This method can reverse a series of operations, + * such as mirroring first and then rotating, and the reverse order is rotating first and then mirroring */ + public Transition reverse() { float[] invertedMatrix = new float[16]; @@ -644,23 +693,13 @@ public Transition reverse() { } - /** - * Get matrix float [ ]. - * - * @return the float [ ] - */ - public float[] getMatrix() { + public float[] getMatrix(){ return mMVPMatrix; } - /** - * To String. - * - * @return string - */ - public String toString() { - StringBuilder sb = new StringBuilder(); - for (float value : mMVPMatrix) { + public String toString(){ + StringBuilder sb =new StringBuilder(); + for (float value: mMVPMatrix){ sb.append(value).append(" "); } return sb.toString(); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/LogUtils.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/LogUtils.kt index 2ee86d9a5..1df151011 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/LogUtils.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/LogUtils.kt @@ -24,118 +24,35 @@ package io.agora.beautyapi.bytedance.utils -import android.util.Log -import java.io.File -import java.io.FileOutputStream -import java.text.SimpleDateFormat -import java.util.Date -import java.util.Locale -import java.util.concurrent.Executors +import io.agora.base.internal.Logging -/** - * Log utils - * - * @constructor Create empty Log utils - */ object LogUtils { private const val beautyType = "ByteDance" - private val timeFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.ROOT) - private val logFileName = "agora_beautyapi_${beautyType.toLowerCase(Locale.ROOT)}_android.log" - private val workerThread = Executors.newSingleThreadExecutor() - private var logOutputStream: FileOutputStream? 
= null - /** - * Set log file path - * - * @param path - */ - @JvmStatic - fun setLogFilePath(path: String){ - if(path.isEmpty()){ - e("LogUtils", "setLogFilePath >> path is empty!") - return - } - val direction = File(path) - if(!direction.exists()){ - direction.mkdirs() - } - val file = File(direction, logFileName) - if(!file.exists()){ - file.createNewFile() - } - val append = file.length() < 2 * 1024 * 1024 - logOutputStream = FileOutputStream(file, append) - } - /** - * I - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun i(tag: String, content: String, vararg args: Any) { val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][INFO] : ${String.format(content, args)}" - Log.v(tag, consoleMessage) - saveToFile(fileMessage) + Logging.log(Logging.Severity.LS_INFO, tag, consoleMessage) } - /** - * D - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun d(tag: String, content: String, vararg args: Any) { val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][DEBUG] : ${String.format(content, args)}" - Log.d(tag, consoleMessage) - saveToFile(fileMessage) + Logging.d(tag, consoleMessage) } - /** - * W - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun w(tag: String, content: String, vararg args: Any){ val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][WARN] : ${String.format(content, args)}" - Log.w(tag, consoleMessage) - saveToFile(fileMessage) + Logging.w(tag, consoleMessage) } - /** - * E - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun e(tag: String, content: String, vararg args: Any){ val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][ERROR] : ${String.format(content, args)}" - Log.e(tag, consoleMessage) - saveToFile(fileMessage) + Logging.e(tag, consoleMessage) } - - private fun saveToFile(message: String){ - val outputStream = logOutputStream ?: return - workerThread.execute { - outputStream.write(message.toByteArray()) - if(!message.endsWith("\n")){ - outputStream.write("\n".toByteArray()) - } - } - } } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/StatsHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/StatsHelper.kt index b4399ed7e..2f2abbe98 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/StatsHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/StatsHelper.kt @@ -30,13 +30,6 @@ import io.agora.beautyapi.bytedance.BeautyStats import kotlin.math.max import kotlin.math.min -/** - * Stats helper - * - * @property statsDuration - * @property onStatsChanged - * @constructor Create empty Stats helper - */ class StatsHelper( private val statsDuration: Long, private val onStatsChanged: (BeautyStats) -> Unit @@ -48,11 +41,6 @@ class StatsHelper( private var mCostMax = 0L private var mCostMin = Long.MAX_VALUE - /** - * Once - * - * @param cost - */ fun once(cost: Long) { val curr = System.currentTimeMillis() if (mStartTime == 0L) { @@ -80,10 +68,6 @@ class StatsHelper( mCostMin = min(mCostMin, 
cost) } - /** - * Reset - * - */ fun reset() { mMainHandler.removeCallbacksAndMessages(null) mStartTime = 0 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Drawable2d.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Drawable2d.java index e807db10a..0e5e13c74 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Drawable2d.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Drawable2d.java @@ -32,33 +32,24 @@ */ public class Drawable2d { private static final int SIZEOF_FLOAT = 4; - /** - * The constant COORDS_PER_VERTEX. - */ public static final int COORDS_PER_VERTEX = 2; - /** - * The constant TEXTURE_COORD_STRIDE. - */ public static final int TEXTURE_COORD_STRIDE = COORDS_PER_VERTEX * SIZEOF_FLOAT; - /** - * The constant VERTEXTURE_STRIDE. - */ public static final int VERTEXTURE_STRIDE = COORDS_PER_VERTEX * SIZEOF_FLOAT; /** * Simple equilateral triangle (1.0 per side). Centered on (0,0). */ - private static final float[] TRIANGLE_COORDS = { - 0.0f, 0.577350269f, // 0 top - -0.5f, -0.288675135f, // 1 bottom left - 0.5f, -0.288675135f // 2 bottom right + private static final float TRIANGLE_COORDS[] = { + 0.0f, 0.577350269f, // 0 top + -0.5f, -0.288675135f, // 1 bottom left + 0.5f, -0.288675135f // 2 bottom right }; - private static final float[] TRIANGLE_TEX_COORDS = { - 0.5f, 0.0f, // 0 top center - 0.0f, 1.0f, // 1 bottom left - 1.0f, 1.0f, // 2 bottom right + private static final float TRIANGLE_TEX_COORDS[] = { + 0.5f, 0.0f, // 0 top center + 0.0f, 1.0f, // 1 bottom left + 1.0f, 1.0f, // 2 bottom right }; private static final FloatBuffer TRIANGLE_BUF = GlUtil.createFloatBuffer(TRIANGLE_COORDS); @@ -71,25 +62,28 @@ public class Drawable2d { *
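To make StatsHelper's once()/reset() contract concrete, a minimal sketch; the tag and the one-second window are invented for illustration:

    val statsHelper = StatsHelper(1000L) { stats ->
        // Invoked at most once per statsDuration window.
        LogUtils.i("StatsDemo", "cost ms: min=${stats.minCostMs} max=${stats.maxCostMs} avg=${stats.averageCostMs}")
    }
    val start = System.currentTimeMillis()
    // ... process one frame ...
    statsHelper.once(System.currentTimeMillis() - start)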
* Triangles are 0-1-2 and 2-1-3 (counter-clockwise winding). */ - private static final float[] RECTANGLE_COORDS = { - -0.5f, -0.5f, // 0 bottom left - 0.5f, -0.5f, // 1 bottom right - -0.5f, 0.5f, // 2 top left - 0.5f, 0.5f, // 3 top right + private static final float RECTANGLE_COORDS[] = { + -0.5f, -0.5f, // 0 bottom left + 0.5f, -0.5f, // 1 bottom right + -0.5f, 0.5f, // 2 top left + 0.5f, 0.5f, // 3 top right }; - /** + /** {zh} + * FrameBuffer 与屏幕的坐标系是垂直镜像的,所以在将纹理绘制到一个 FrameBuffer 或屏幕上 + * 的时候,他们用的纹理顶点坐标是不同的,需要注意。 + */ + /** {en} * The coordinate system of the FrameBuffer and the screen is mirrored vertically, so when drawing the texture to a FrameBuffer or screen * , the vertex coordinates of the texture they use are different, which needs attention. - *
FrameBuffer 与屏幕的坐标系是垂直镜像的,所以在将纹理绘制到一个 FrameBuffer 或屏幕上 - * 的时候,他们用的纹理顶点坐标是不同的,需要注意。 */ - private static final float[] RECTANGLE_TEX_COORDS = { + + private static final float RECTANGLE_TEX_COORDS[] = { 0.0f, 1.0f, // 0 bottom left 1.0f, 1.0f, // 1 bottom right 0.0f, 0.0f, // 2 top left 1.0f, 0.0f // 3 top right }; - private static final float[] RECTANGLE_TEX_COORDS1 = { + private static final float RECTANGLE_TEX_COORDS1[] = { 0.0f, 0.0f, // 0 bottom left 1.0f, 0.0f, // 1 bottom right 0.0f, 1.0f, // 2 top left @@ -109,31 +103,33 @@ public class Drawable2d { * The texture coordinates are Y-inverted relative to RECTANGLE. (This seems to work out * right with external textures from SurfaceTexture.) */ - private static final float[] FULL_RECTANGLE_COORDS = { - -1.0f, -1.0f, // 0 bottom left - 1.0f, -1.0f, // 1 bottom right - -1.0f, 1.0f, // 2 top left - 1.0f, 1.0f, // 3 top right + private static final float FULL_RECTANGLE_COORDS[] = { + -1.0f, -1.0f, // 0 bottom left + 1.0f, -1.0f, // 1 bottom right + -1.0f, 1.0f, // 2 top left + 1.0f, 1.0f, // 3 top right }; - - /** + /** {zh} + * FrameBuffer 与屏幕的坐标系是垂直镜像的,所以在将纹理绘制到一个 FrameBuffer 或屏幕上 + * 的时候,他们用的纹理顶点坐标是不同的,需要注意。 + */ + /** {en} * The coordinate system of the FrameBuffer and the screen is mirrored vertically, so when drawing the texture to a FrameBuffer or screen * , the vertex coordinates of the texture they use are different, which needs attention. - *
FrameBuffer 与屏幕的坐标系是垂直镜像的,所以在将纹理绘制到一个 FrameBuffer 或屏幕上 - * 的时候,他们用的纹理顶点坐标是不同的,需要注意。 */ - private static final float[] FULL_RECTANGLE_TEX_COORDS = { - 0.0f, 1.0f, // 0 bottom left - 1.0f, 1.0f, // 1 bottom right - 0.0f, 0.0f, // 2 top left - 1.0f, 0.0f // 3 top right + + private static final float FULL_RECTANGLE_TEX_COORDS[] = { + 0.0f, 1.0f, // 0 bottom left + 1.0f, 1.0f, // 1 bottom right + 0.0f, 0.0f, // 2 top left + 1.0f, 0.0f // 3 top right }; - private static final float[] FULL_RECTANGLE_TEX_COORDS1 = { - 0.0f, 0.0f, // 0 bottom left - 1.0f, 0.0f, // 1 bottom right - 0.0f, 1.0f, // 2 top left - 1.0f, 1.0f // 3 top right + private static final float FULL_RECTANGLE_TEX_COORDS1[] = { + 0.0f, 0.0f, // 0 bottom left + 1.0f, 0.0f, // 1 bottom right + 0.0f, 1.0f, // 2 top left + 1.0f, 1.0f // 3 top right }; private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS); @@ -156,26 +152,13 @@ public class Drawable2d { * Enum values for constructor. */ public enum Prefab { - /** - * Triangle prefab. - */ - TRIANGLE, - /** - * Rectangle prefab. - */ - RECTANGLE, - /** - * Full rectangle prefab. - */ - FULL_RECTANGLE + TRIANGLE, RECTANGLE, FULL_RECTANGLE } /** * Prepares a drawable from a "pre-fabricated" shape definition. *
* Does no EGL/GL operations, so this can be done at any time. - * - * @param shape the shape */ public Drawable2d(Prefab shape) { switch (shape) { @@ -214,8 +197,6 @@ public Drawable2d(Prefab shape) { * Returns the array of vertices. *
* To avoid allocations, this returns internal state. The caller must not modify it. - * - * @return the vertex array */ public FloatBuffer getVertexArray() { return mVertexArray; @@ -225,27 +206,24 @@ public FloatBuffer getVertexArray() { * Returns the array of texture coordinates. *
* To avoid allocations, this returns internal state. The caller must not modify it. - * - * @return the tex coord array */ public FloatBuffer getTexCoordArray() { return mTexCoordArray; } - - /** - * Gets tex coor array fb. - * - * @return the tex coor array fb + /** {zh} + * @brief 返回 frameBuffer 绘制用 texture coordinates */ + /** {en} + * @brief Returns texture coordinates for drawing frameBuffer + */ + public FloatBuffer getTexCoorArrayFB() { return mTexCoordArrayFB; } /** * Returns the number of vertices stored in the vertex array. - * - * @return the vertex count */ public int getVertexCount() { return mVertexCount; @@ -253,8 +231,6 @@ public int getVertexCount() { /** * Returns the width, in bytes, of the data for each vertex. - * - * @return the vertex stride */ public int getVertexStride() { return mVertexStride; @@ -262,8 +238,6 @@ public int getVertexStride() { /** * Returns the width, in bytes, of the data for each texture coordinate. - * - * @return the tex coord stride */ public int getTexCoordStride() { return mTexCoordStride; @@ -271,37 +245,20 @@ public int getTexCoordStride() { /** * Returns the number of position coordinates per vertex. This will be 2 or 3. - * - * @return the coords per vertex */ public int getCoordsPerVertex() { return mCoordsPerVertex; } - /** - * Update vertex array. - * - * @param fullRectangleCoords the full rectangle coords - */ - public void updateVertexArray(float[] fullRectangleCoords) { - mVertexArray = GlUtil.createFloatBuffer(fullRectangleCoords); - mVertexCount = fullRectangleCoords.length / COORDS_PER_VERTEX; + public void updateVertexArray(float[] FULL_RECTANGLE_COORDS) { + mVertexArray = GlUtil.createFloatBuffer(FULL_RECTANGLE_COORDS); + mVertexCount = FULL_RECTANGLE_COORDS.length / COORDS_PER_VERTEX; } - /** - * Update tex coord array. - * - * @param fullRectangleTexCoords the full rectangle tex coords - */ - public void updateTexCoordArray(float[] fullRectangleTexCoords) { - mTexCoordArray = GlUtil.createFloatBuffer(fullRectangleTexCoords); + public void updateTexCoordArray(float[] FULL_RECTANGLE_TEX_COORDS) { + mTexCoordArray = GlUtil.createFloatBuffer(FULL_RECTANGLE_TEX_COORDS); } - /** - * Update tex coord array fb. - * - * @param coords the coords - */ public void updateTexCoordArrayFB(float[] coords) { mTexCoordArrayFB = GlUtil.createFloatBuffer(coords); } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Extensions.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Extensions.java index 2d43b82b7..1b90c1b7c 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Extensions.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Extensions.java @@ -30,17 +30,8 @@ import java.io.IOException; import java.io.InputStream; -/** - * The type Extensions. - */ public abstract class Extensions { - /** - * Get bytes byte [ ]. - * - * @param inputStream the input stream - * @return the byte [ ] - */ public static byte[] getBytes(InputStream inputStream) { try { byte[] bytes = new byte[inputStream.available()]; @@ -54,13 +45,6 @@ public static byte[] getBytes(InputStream inputStream) { return new byte[0]; } - /** - * Get bytes byte [ ]. 
- * - * @param assetManager the asset manager - * @param fileName the file name - * @return the byte [ ] - */ public static byte[] getBytes(AssetManager assetManager, String fileName) { try { return getBytes(assetManager.open(fileName)); @@ -71,13 +55,6 @@ public static byte[] getBytes(AssetManager assetManager, String fileName) { return new byte[0]; } - /** - * Read text file from resource string. - * - * @param context the context - * @param resourceId the resource id - * @return the string - */ public static String readTextFileFromResource(Context context, int resourceId) { return new String(Extensions.getBytes(context.getResources().openRawResource(resourceId))); } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/GlUtil.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/GlUtil.java index 3b0574d54..751e87e99 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/GlUtil.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/GlUtil.java @@ -48,14 +48,8 @@ * Some OpenGL utility functions. */ public abstract class GlUtil { - /** - * The constant TAG. - */ public static final String TAG = GlUtil.class.getSimpleName(); - /** - * The constant NO_TEXTURE. - */ public static final int NO_TEXTURE = -1; // public static final int TYPE_FITXY=0; // public static final int TYPE_CENTERCROP=1; @@ -63,14 +57,8 @@ public abstract class GlUtil { // public static final int TYPE_FITSTART=3; // public static final int TYPE_FITEND=4; - /** - * The constant x_scale. - */ - public static final float X_SCALE = 1.0f; - /** - * The constant y_scale. - */ - public static final float Y_SCALE = 1.0f; + public static float x_scale = 1.0f; + public static float y_scale = 1.0f; /** * Identity matrix for general use. Don't modify or life will get weird. @@ -91,8 +79,6 @@ private GlUtil() { /** * Creates a new program from the supplied vertex and fragment shaders. * - * @param vertexSource the vertex source - * @param fragmentSource the fragment source * @return A handle to the program, or 0 on failure. */ public static int createProgram(String vertexSource, String fragmentSource) { @@ -129,8 +115,6 @@ public static int createProgram(String vertexSource, String fragmentSource) { /** * Compiles the provided shader source. * - * @param shaderType the shader type - * @param source the source * @return A handle to the shader, or 0 on failure. */ public static int loadShader(int shaderType, String source) { @@ -151,8 +135,6 @@ public static int loadShader(int shaderType, String source) { /** * Checks to see if a GLES error has been raised. - * - * @param op the op */ public static void checkGlError(String op) { int error = GLES20.glGetError(); @@ -167,9 +149,6 @@ public static void checkGlError(String op) { * could not be found, but does not set the GL error. *
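For completeness, the surviving Extensions helpers are typically used to load shader sources and assets; a sketch where the resource id and asset path are hypothetical placeholders:

    val shaderSource = Extensions.readTextFileFromResource(context, R.raw.vertex_shader) // hypothetical raw resource
    val assetBytes = Extensions.getBytes(context.assets, "effects/config.json") // hypothetical asset path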

* Throws a RuntimeException if the location is invalid. - * - * @param location the location - * @param label the label */ public static void checkLocation(int location, String label) { if (location < 0) { @@ -178,6 +157,7 @@ public static void checkLocation(int location, String label) { } + /** * Creates a texture from raw data. * @@ -221,9 +201,7 @@ public static int createImageTexture(ByteBuffer data, int width, int height, int * @return Handle to texture. */ public static int createImageTexture(Bitmap bmp) { - if (null == bmp || bmp.isRecycled()) { - return NO_TEXTURE; - } + if (null == bmp || bmp.isRecycled())return NO_TEXTURE; int[] textureHandles = new int[1]; int textureHandle; GLES20.glGenTextures(1, textureHandles, 0); @@ -254,9 +232,6 @@ public static int createImageTexture(Bitmap bmp) { /** * Allocates a direct float buffer, and populates it with the float array data. - * - * @param coords the coords - * @return the float buffer */ public static FloatBuffer createFloatBuffer(float[] coords) { // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. @@ -268,15 +243,6 @@ public static FloatBuffer createFloatBuffer(float[] coords) { return fb; } - /** - * Change mvp matrix crop float [ ]. - * - * @param viewWidth the view width - * @param viewHeight the view height - * @param textureWidth the texture width - * @param textureHeight the texture height - * @return the float [ ] - */ public static float[] changeMVPMatrixCrop(float viewWidth, float viewHeight, float textureWidth, float textureHeight) { float scale = viewWidth * textureHeight / viewHeight / textureWidth; float[] mvp = new float[16]; @@ -289,9 +255,6 @@ public static float[] changeMVPMatrixCrop(float viewWidth, float viewHeight, flo * Creates a texture object suitable for use with this program. *
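(The createImageTexture(Bitmap) hunk above uploads a Bitmap into a fresh GL_TEXTURE_2D. A self-contained sketch of the same upload path; the filter and wrap parameters are the usual safe defaults and an assumption, not a claim about this repository's exact values.)

    import android.graphics.Bitmap;
    import android.opengl.GLES20;
    import android.opengl.GLUtils;

    final class BitmapTexture {
        static final int NO_TEXTURE = -1;

        // Caller must hold a current GL context; returns NO_TEXTURE for unusable bitmaps.
        static int createImageTexture(Bitmap bmp) {
            if (bmp == null || bmp.isRecycled()) {
                return NO_TEXTURE;
            }
            int[] handles = new int[1];
            GLES20.glGenTextures(1, handles, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, handles[0]);
            // Linear filtering + clamp-to-edge: safe defaults for video quads.
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            // GLUtils infers format/type from the Bitmap config and uploads the pixels.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
            return handles[0];
        }
    }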

* On exit, the texture will be bound. - * - * @param textureTarget the texture target - * @return the int */ public static int createTextureObject(int textureTarget) { int[] textures = new int[1]; @@ -311,37 +274,18 @@ public static int createTextureObject(int textureTarget) { return texId; } - /** - * Delete texture id. - * - * @param textureId the texture id - */ public static void deleteTextureId(int[] textureId) { if (textureId != null && textureId.length > 0) { GLES20.glDeleteTextures(textureId.length, textureId, 0); } } - - /** - * Delete texture id. - * - * @param textureId the texture id - */ public static void deleteTextureId(int textureId) { int[] textures = new int[1]; - textures[0] = textureId; + textures[0]= textureId; GLES20.glDeleteTextures(textures.length, textures, 0); } - /** - * Create fbo. - * - * @param fboTex the fbo tex - * @param fboId the fbo id - * @param width the width - * @param height the height - */ public static void createFBO(int[] fboTex, int[] fboId, int width, int height) { //generate fbo id GLES20.glGenFramebuffers(1, fboId, 0); @@ -365,27 +309,12 @@ public static void createFBO(int[] fboTex, int[] fboId, int width, int height) { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); } - /** - * Delete fbo. - * - * @param fboId the fbo id - */ public static void deleteFBO(int[] fboId) { if (fboId != null && fboId.length > 0) { GLES20.glDeleteFramebuffers(fboId.length, fboId, 0); } } - /** - * Change mvp matrix crop float [ ]. - * - * @param mvpMatrix the mvp matrix - * @param viewWidth the view width - * @param viewHeight the view height - * @param textureWidth the texture width - * @param textureHeight the texture height - * @return the float [ ] - */ public static float[] changeMVPMatrixCrop(float[] mvpMatrix, float viewWidth, float viewHeight, float textureWidth, float textureHeight) { float scale = viewWidth * textureHeight / viewHeight / textureWidth; if (scale == 1.0f) { @@ -401,106 +330,74 @@ public static float[] changeMVPMatrixCrop(float[] mvpMatrix, float viewWidth, fl } - /** - * Gets show matrix. 
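(The createFBO hunk above pairs one color texture with one framebuffer object; this is the backbone of every off-screen render pass in these Program classes. A minimal sketch of the same setup, with a completeness check added for clarity.)

    import android.opengl.GLES20;

    final class FboSetup {
        // fboTex[0] and fboId[0] receive the new handles; requires a current GL context.
        static void createFBO(int[] fboTex, int[] fboId, int width, int height) {
            GLES20.glGenFramebuffers(1, fboId, 0);
            GLES20.glGenTextures(1, fboTex, 0);

            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, fboTex[0]);
            // Allocate RGBA storage with no initial pixel data.
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
                    GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

            // Attach the texture as the FBO's color buffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId[0]);
            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                    GLES20.GL_TEXTURE_2D, fboTex[0], 0);
            if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)
                    != GLES20.GL_FRAMEBUFFER_COMPLETE) {
                throw new RuntimeException("framebuffer incomplete");
            }
            // Unbind so later draws target the default framebuffer again.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        }
    }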
- * - * @param matrix the matrix - * @param imgWidth the img width - * @param imgHeight the img height - * @param viewWidth the view width - * @param viewHeight the view height - */ - public static void getShowMatrix(float[] matrix, int imgWidth, int imgHeight, int viewWidth, int viewHeight) { - if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) { - float sWhView = (float) viewWidth / viewHeight; - float sWhImg = (float) imgWidth / imgHeight; - float[] projection = new float[16]; - float[] camera = new float[16]; - if (sWhImg > sWhView) { - Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3); - } else { - Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3); + public static void getShowMatrix(float[] matrix,int imgWidth,int imgHeight,int viewWidth,int viewHeight){ + if(imgHeight>0&&imgWidth>0&&viewWidth>0&&viewHeight>0){ + float sWhView=(float)viewWidth/viewHeight; + float sWhImg=(float)imgWidth/imgHeight; + float[] projection=new float[16]; + float[] camera=new float[16]; + if(sWhImg>sWhView){ + Matrix.orthoM(projection,0,-sWhView/sWhImg,sWhView/sWhImg,-1,1,1,3); + }else{ + Matrix.orthoM(projection,0,-1,1,-sWhImg/sWhView,sWhImg/sWhView,1,3); } - Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0); - Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0); + Matrix.setLookAtM(camera,0,0,0,1,0,0,0,0,1,0); + Matrix.multiplyMM(matrix,0,projection,0,camera,0); } } - /** - * Gets show matrix. - * - * @param matrix the matrix - * @param type the type - * @param imgWidth the img width - * @param imgHeight the img height - * @param viewWidth the view width - * @param viewHeight the view height - */ public static void getShowMatrix(float[] matrix, ImageView.ScaleType type, int imgWidth, int imgHeight, int viewWidth, - int viewHeight) { - if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) { - float[] projection = new float[16]; - float[] camera = new float[16]; - if (type == FIT_XY) { - Matrix.orthoM(projection, 0, -1, 1, -1, 1, 1, 3); - Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0); - Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0); + int viewHeight){ + if(imgHeight>0&&imgWidth>0&&viewWidth>0&&viewHeight>0){ + float[] projection=new float[16]; + float[] camera=new float[16]; + if(type== FIT_XY){ + Matrix.orthoM(projection,0,-1,1,-1,1,1,3); + Matrix.setLookAtM(camera,0,0,0,1,0,0,0,0,1,0); + Matrix.multiplyMM(matrix,0,projection,0,camera,0); } - float sWhView = (float) viewWidth / viewHeight; - float sWhImg = (float) imgWidth / imgHeight; - if (sWhImg > sWhView) { - switch (type) { + float sWhView=(float)viewWidth/viewHeight; + float sWhImg=(float)imgWidth/imgHeight; + if(sWhImg>sWhView){ + switch (type){ case CENTER_CROP: - Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3); - Matrix.scaleM(projection, 0, X_SCALE, Y_SCALE, 1); + Matrix.orthoM(projection,0,-sWhView/sWhImg,sWhView/sWhImg,-1,1,1,3); + Matrix.scaleM(projection,0,x_scale,y_scale,1); break; case CENTER_INSIDE: - Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3); + Matrix.orthoM(projection,0,-1,1,-sWhImg/sWhView,sWhImg/sWhView,1,3); break; case FIT_START: - Matrix.orthoM(projection, 0, -1, 1, 1 - 2 * sWhImg / sWhView, 1, 1, 3); + Matrix.orthoM(projection,0,-1,1,1-2*sWhImg/sWhView,1,1,3); break; case FIT_END: - Matrix.orthoM(projection, 0, -1, 1, -1, 2 * sWhImg / sWhView - 1, 1, 3); + Matrix.orthoM(projection,0,-1,1,-1,2*sWhImg/sWhView-1,1,3); break; - default: - // do 
nothing } - } else { - switch (type) { + }else{ + switch (type){ case CENTER_CROP: - Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3); - Matrix.scaleM(projection, 0, X_SCALE, Y_SCALE, 1); + Matrix.orthoM(projection,0,-1,1,-sWhImg/sWhView,sWhImg/sWhView,1,3); + Matrix.scaleM(projection,0,x_scale,y_scale,1); break; case CENTER_INSIDE: - Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3); + Matrix.orthoM(projection,0,-sWhView/sWhImg,sWhView/sWhImg,-1,1,1,3); break; case FIT_START: - Matrix.orthoM(projection, 0, -1, 2 * sWhView / sWhImg - 1, -1, 1, 1, 3); + Matrix.orthoM(projection,0,-1,2*sWhView/sWhImg-1,-1,1,1,3); break; case FIT_END: - Matrix.orthoM(projection, 0, 1 - 2 * sWhView / sWhImg, 1, -1, 1, 1, 3); + Matrix.orthoM(projection,0,1-2*sWhView/sWhImg,1,-1,1,1,3); break; - default: - // do nothing } } - Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0); - Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0); + Matrix.setLookAtM(camera,0,0,0,1,0,0,0,0,1,0); + Matrix.multiplyMM(matrix,0,projection,0,camera,0); } } - /** - * Change mvp matrix inside float [ ]. - * - * @param viewWidth the view width - * @param viewHeight the view height - * @param textureWidth the texture width - * @param textureHeight the texture height - * @return the float [ ] - */ public static float[] changeMVPMatrixInside(float viewWidth, float viewHeight, float textureWidth, float textureHeight) { float scale = viewWidth * textureHeight / viewHeight / textureWidth; float[] mvp = new float[16]; @@ -512,8 +409,8 @@ public static float[] changeMVPMatrixInside(float viewWidth, float viewHeight, f /** * Prefer OpenGL ES 3.0, otherwise 2.0 * - * @param context the context - * @return support gl version + * @param context + * @return */ public static int getSupportGLVersion(Context context) { final ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); @@ -526,55 +423,24 @@ public static int getSupportGLVersion(Context context) { } - /** - * Rotate float [ ]. - * - * @param m the m - * @param angle the angle - * @return the float [ ] - */ - public static float[] rotate(float[] m, float angle) { - Matrix.rotateM(m, 0, angle, 0, 0, 1); + public static float[] rotate(float[] m,float angle){ + Matrix.rotateM(m,0,angle,0,0,1); return m; } - /** - * Flip float [ ]. - * - * @param m the m - * @param x the x - * @param y the y - * @return the float [ ] - */ - public static float[] flip(float[] m, boolean x, boolean y) { - if (x || y) { - Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1); + public static float[] flip(float[] m,boolean x,boolean y){ + if(x||y){ + Matrix.scaleM(m,0,x?-1:1,y?-1:1,1); } return m; } - /** - * Scale float [ ]. - * - * @param m the m - * @param x the x - * @param y the y - * @return the float [ ] - */ - public static float[] scale(float[] m, float x, float y) { - Matrix.scaleM(m, 0, x, y, 1); + public static float[] scale(float[] m,float x,float y){ + Matrix.scaleM(m,0,x,y,1); return m; } - /** - * Read pixles buffer byte buffer. - * - * @param textureId the texture id - * @param width the width - * @param height the height - * @return the byte buffer - */ public static ByteBuffer readPixlesBuffer(int textureId, int width, int height) { if (textureId == GlUtil.NO_TEXTURE) { @@ -612,12 +478,7 @@ public static ByteBuffer readPixlesBuffer(int textureId, int width, int height) return mCaptureBuffer; } - /** - * Gets external oes texture id. 
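(The getShowMatrix hunks above pick orthographic bounds from the image and view aspect ratios. A worked example of the CENTER_CROP vs CENTER_INSIDE arithmetic for the sWhImg > sWhView branch; the resolutions below are illustrative.)

    final class AspectMath {
        // Symmetric ortho half-extents chosen when the image is wider than the view
        // (sWhImg > sWhView): index 0 is the x extent, index 1 is the y extent.
        static float[] orthoHalfExtents(boolean centerCrop, float sWhImg, float sWhView) {
            if (centerCrop) {
                // Crop the sides: shrink the x extent, keep y at +/-1, so the
                // [-1,1] quad overflows the frustum horizontally and is clipped.
                return new float[]{sWhView / sWhImg, 1f};
            }
            // CENTER_INSIDE: grow the y extent, keep x at +/-1, so the whole
            // quad fits with letterbox bars above and below.
            return new float[]{1f, sWhImg / sWhView};
        }

        public static void main(String[] args) {
            float sWhImg = 1920f / 1080f;   // ~1.78, landscape frame
            float sWhView = 1080f / 1920f;  // ~0.56, portrait view
            float[] crop = orthoHalfExtents(true, sWhImg, sWhView);
            float[] inside = orthoHalfExtents(false, sWhImg, sWhView);
            // crop x = +/-0.32 (sides clipped); inside y = +/-3.16, i.e. the image
            // occupies sWhView/sWhImg = ~32% of the view height when letterboxed.
            System.out.printf("crop x=+/-%.2f, inside y=+/-%.2f%n", crop[0], inside[1]);
        }
    }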
- * - * @return the external oes texture id - */ - public static int getExternalOESTextureID() { + public static int getExternalOESTextureID(){ int[] texture = new int[1]; GLES20.glGenTextures(1, texture, 0); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Program.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Program.java index 3152c5606..71571a0c5 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Program.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/Program.java @@ -31,96 +31,44 @@ import java.nio.ByteBuffer; -/** - * The type Program. - */ public abstract class Program { private static final String TAG = GlUtil.TAG; - /** - * The M program handle. - */ -// Handles to the GL program and various components of it. + // Handles to the GL program and various components of it. protected int mProgramHandle; - /** - * The M drawable 2 d. - */ protected Drawable2d mDrawable2d; - /** - * The M frame buffers. - */ protected int[] mFrameBuffers; - /** - * The M frame buffer textures. - */ protected int[] mFrameBufferTextures; - /** - * The Frame buffer num. - */ - protected int frameBufferNum = 1; - /** - * The M frame buffer shape. - */ + protected int FRAME_BUFFER_NUM = 1; protected Point mFrameBufferShape; - /** * Prepares the program in the current EGL context. - * - * @param vertexShader the vertex shader - * @param fragmentShader2D the fragment shader 2 d */ - public Program(String vertexShader, String fragmentShader2D) { - mProgramHandle = GlUtil.createProgram(vertexShader, fragmentShader2D); + public Program(String VERTEX_SHADER, String FRAGMENT_SHADER_2D) { + mProgramHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER_2D); mDrawable2d = getDrawable2d(); getLocations(); } - /** - * Instantiates a new Program. - * - * @param context the context - * @param vertexShaderResourceId the vertex shader resource id - * @param fragmentShaderResourceId the fragment shader resource id - */ public Program(Context context, int vertexShaderResourceId, int fragmentShaderResourceId) { this(Extensions.readTextFileFromResource(context, vertexShaderResourceId), Extensions.readTextFileFromResource(context, fragmentShaderResourceId)); } - /** - * Update vertex array. - * - * @param fullRectangleCoords the full rectangle coords - */ - public void updateVertexArray(float[] fullRectangleCoords) { - mDrawable2d.updateVertexArray(fullRectangleCoords); + public void updateVertexArray(float[] FULL_RECTANGLE_COORDS) { + mDrawable2d.updateVertexArray(FULL_RECTANGLE_COORDS); } - /** - * Update tex coord array. - * - * @param fullRectangleTexCoords the full rectangle tex coords - */ - public void updateTexCoordArray(float[] fullRectangleTexCoords) { - mDrawable2d.updateTexCoordArray(fullRectangleTexCoords); + public void updateTexCoordArray(float[] FULL_RECTANGLE_TEX_COORDS) { + mDrawable2d.updateTexCoordArray(FULL_RECTANGLE_TEX_COORDS); } - /** - * Update tex coord array fb. - * - * @param coords the coords - */ public void updateTexCoordArrayFB(float[] coords) { mDrawable2d.updateTexCoordArrayFB(coords); } - /** - * Gets drawable 2 d. - * - * @return the drawable 2 d - */ protected abstract Drawable2d getDrawable2d(); /** @@ -130,42 +78,15 @@ public void updateTexCoordArrayFB(float[] coords) { /** * Issues the draw call. Does the full setup on every call. 
- * - * @param textureId the texture id - * @param width the width - * @param height the height - * @param mvpMatrix the mvp matrix */ public abstract void drawFrameOnScreen(int textureId, int width, int height, float[] mvpMatrix); - /** - * Draw frame off screen int. - * - * @param textureId the texture id - * @param width the width - * @param height the height - * @param mvpMatrix the mvp matrix - * @return the int - */ - public abstract int drawFrameOffScreen(int textureId, int width, int height, float[] mvpMatrix); - /** - * Read buffer byte buffer. - * - * @param textureId the texture id - * @param width the width - * @param height the height - * @return the byte buffer - */ + public abstract int drawFrameOffScreen(int textureId,int width, int height, float[] mvpMatrix); + public abstract ByteBuffer readBuffer(int textureId, int width, int height); - /** - * Init frame buffer if need. - * - * @param width the width - * @param height the height - */ protected void initFrameBufferIfNeed(int width, int height) { boolean need = false; if (null == mFrameBufferShape || mFrameBufferShape.x != width || mFrameBufferShape.y != height) { @@ -175,11 +96,11 @@ protected void initFrameBufferIfNeed(int width, int height) { need = true; } if (need) { - mFrameBuffers = new int[frameBufferNum]; - mFrameBufferTextures = new int[frameBufferNum]; - GLES20.glGenFramebuffers(frameBufferNum, mFrameBuffers, 0); - GLES20.glGenTextures(frameBufferNum, mFrameBufferTextures, 0); - for (int i = 0; i < frameBufferNum; i++) { + mFrameBuffers = new int[FRAME_BUFFER_NUM]; + mFrameBufferTextures = new int[FRAME_BUFFER_NUM]; + GLES20.glGenFramebuffers(FRAME_BUFFER_NUM, mFrameBuffers, 0); + GLES20.glGenTextures(FRAME_BUFFER_NUM, mFrameBufferTextures, 0); + for (int i = 0; i < FRAME_BUFFER_NUM; i++) { bindFrameBuffer(mFrameBufferTextures[i], mFrameBuffers[i], width, height); } mFrameBufferShape = new Point(width, height); @@ -190,15 +111,26 @@ protected void initFrameBufferIfNeed(int width, int height) { private void destroyFrameBuffers() { if (mFrameBufferTextures != null) { - GLES20.glDeleteTextures(frameBufferNum, mFrameBufferTextures, 0); + GLES20.glDeleteTextures(FRAME_BUFFER_NUM, mFrameBufferTextures, 0); mFrameBufferTextures = null; } if (mFrameBuffers != null) { - GLES20.glDeleteFramebuffers(frameBufferNum, mFrameBuffers, 0); + GLES20.glDeleteFramebuffers(FRAME_BUFFER_NUM, mFrameBuffers, 0); mFrameBuffers = null; } } + /** {zh} + * 纹理参数设置+buffer绑定 + * set texture params + * and bind buffer + */ + /** {en} + * Texture parameter setting + buffer binding + * set texture params + * and binding buffer + */ + private void bindFrameBuffer(int textureId, int frameBuffer, int width, int height) { GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramManager.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramManager.java index d227e6da6..d536a6f5f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramManager.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramManager.java @@ -25,27 +25,21 @@ package io.agora.beautyapi.bytedance.utils.opengl; -import com.bytedance.labcv.effectsdk.BytedEffectConstants; +import com.effectsar.labcv.effectsdk.EffectsSDKEffectConstants; -/** - * The type Program manager. 
- */ public class ProgramManager { + public ProgramManager() { + } + private ProgramTexture2d mProgramTexture2D; private ProgramTextureOES mProgramTextureOES; - /** - * Gets program. - * - * @param srcTetxureFormat the src tetxure format - * @return the program - */ - public Program getProgram(BytedEffectConstants.TextureFormat srcTetxureFormat) { - switch (srcTetxureFormat) { + public Program getProgram(EffectsSDKEffectConstants.TextureFormat srcTetxureFormat){ + switch (srcTetxureFormat){ case Texure2D: - if (null == mProgramTexture2D) { + if (null == mProgramTexture2D){ mProgramTexture2D = new ProgramTexture2d(); } return mProgramTexture2D; @@ -54,21 +48,18 @@ public Program getProgram(BytedEffectConstants.TextureFormat srcTetxureFormat) { mProgramTextureOES = new ProgramTextureOES(); } return mProgramTextureOES; - default: - return null; } + return null; + } - /** - * Release. - */ - public void release() { - if (null != mProgramTexture2D) { + public void release(){ + if (null != mProgramTexture2D){ mProgramTexture2D.release(); mProgramTexture2D = null; } - if (null != mProgramTextureOES) { + if (null != mProgramTextureOES){ mProgramTextureOES.release(); mProgramTextureOES = null; diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTexture2d.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTexture2d.java index 3aab4a67e..b81a0525f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTexture2d.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTexture2d.java @@ -32,38 +32,32 @@ import java.nio.ByteBuffer; -/** - * The type Program texture 2 d. - */ public class ProgramTexture2d extends Program { // Simple vertex shader, used for all programs. private static final String VERTEX_SHADER = - "uniform mat4 uMVPMatrix;\n" - + "attribute vec4 aPosition;\n" - + "attribute vec2 aTextureCoord;\n" - + "varying vec2 vTextureCoord;\n" - + "void main() {\n" - + " gl_Position = uMVPMatrix * aPosition;\n" - + " vTextureCoord = aTextureCoord;\n" - + "}\n"; + "uniform mat4 uMVPMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec2 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = aTextureCoord;\n" + + "}\n"; // Simple fragment shader for use with "normal" 2D textures. private static final String FRAGMENT_SHADER_2D = - "precision mediump float;\n" - + "varying vec2 vTextureCoord;\n" - + "uniform sampler2D sTexture;\n" - + "void main() {\n" - + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" - + "}\n"; + "precision mediump float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform sampler2D sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; private int muMVPMatrixLoc; private int maPositionLoc; private int maTextureCoordLoc; - /** - * Instantiates a new Program texture 2 d. - */ public ProgramTexture2d() { super(VERTEX_SHADER, FRAGMENT_SHADER_2D); } @@ -122,6 +116,7 @@ public void drawFrameOnScreen(int textureId, int width, int height, float[] mvpM GLES20.glViewport(0, 0, width, height); + // Draw the rect. 
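(ProgramManager.getProgram above is a lazy cache: one program object per texture format, compiled on first request on the GL thread that will use it. A generic sketch of that pattern; the usage comment names classes from this diff, the cache itself is hypothetical helper code.)

    import java.util.function.Supplier;

    final class LazyCache<T> {
        private final Supplier<T> factory;
        private T instance;

        LazyCache(Supplier<T> factory) {
            this.factory = factory;
        }

        // Build on first use, then reuse; ProgramManager does this per texture format.
        T get() {
            if (instance == null) {
                instance = factory.get();
            }
            return instance;
        }

        // Drop the cached instance so the next get() rebuilds it. ProgramManager.release()
        // additionally frees the GL program before nulling the field.
        void release() {
            instance = null;
        }
    }

    // Usage, mirroring getProgram(TextureFormat):
    //   LazyCache<ProgramTexture2d> tex2d = new LazyCache<>(ProgramTexture2d::new);
    //   LazyCache<ProgramTextureOES> oes  = new LazyCache<>(ProgramTextureOES::new);
    //   Program p = (format == Texure2D) ? tex2d.get() : oes.get();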
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, mDrawable2d.getVertexCount()); GlUtil.checkGlError("glDrawArrays"); @@ -192,17 +187,29 @@ public int drawFrameOffScreen(int textureId, int width, int height, float[] mvpM return mFrameBufferTextures[0]; } + /** {zh} + * 读取渲染结果的buffer + * @param width 目标宽度 + * @param height 目标高度 + * @return 渲染结果的像素Buffer 格式RGBA + */ + /** {en} + * Read the buffer + * @param width target width + * @param height target height + * @return pixel Buffer format of the rendered result RGBA + */ + private int mWidth = 0; private int mHeight = 0; private ByteBuffer mCaptureBuffer = null; - @Override public ByteBuffer readBuffer(int textureId, int width, int height) { - if (textureId == GlUtil.NO_TEXTURE) { + if ( textureId == GlUtil.NO_TEXTURE) { return null; } - if (width * height == 0) { - return null; + if (width* height == 0){ + return null; } if (mCaptureBuffer == null || mWidth * mHeight != width * height) { @@ -212,7 +219,7 @@ public ByteBuffer readBuffer(int textureId, int width, int height) { } mCaptureBuffer.position(0); int[] frameBuffer = new int[1]; - GLES20.glGenFramebuffers(1, frameBuffer, 0); + GLES20.glGenFramebuffers(1,frameBuffer,0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureOES.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureOES.java index 56fd0f840..c2667f4e7 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureOES.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureOES.java @@ -32,32 +32,29 @@ import java.nio.ByteBuffer; -/** - * The type Program texture oes. - */ public class ProgramTextureOES extends Program { // Simple vertex shader, used for all programs. private static final String VERTEX_SHADER = - "uniform mat4 uMVPMatrix;\n" - + "attribute vec4 aPosition;\n" - + "attribute vec2 aTextureCoord;\n" - + "varying vec2 vTextureCoord;\n" - + "void main() {\n" - + " gl_Position = uMVPMatrix * aPosition;\n" - + " vTextureCoord = aTextureCoord;\n" - + "}\n"; + "uniform mat4 uMVPMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec2 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = aTextureCoord;\n" + + "}\n"; // Simple fragment shader for use with external 2D textures (e.g. what we get from // SurfaceTexture). 
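(The readBuffer implementations in these Program classes all reduce to the same recipe: attach the texture to a scratch FBO, glReadPixels into a direct buffer, clean up. A minimal sketch of that recipe for a GL_TEXTURE_2D source; OES sources only differ in the bind target.)

    import android.opengl.GLES20;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class PixelReadback {
        // Returns width*height*4 bytes of RGBA, or stalls the pipeline trying:
        // glReadPixels is synchronous, so keep this off per-frame hot paths.
        static ByteBuffer readRgba(int textureId, int width, int height) {
            ByteBuffer out = ByteBuffer.allocateDirect(width * height * 4)
                    .order(ByteOrder.nativeOrder());
            int[] fbo = new int[1];
            GLES20.glGenFramebuffers(1, fbo, 0);
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]);
            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                    GLES20.GL_TEXTURE_2D, textureId, 0);
            GLES20.glReadPixels(0, 0, width, height,
                    GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, out);
            // Detach and delete the scratch framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
            GLES20.glDeleteFramebuffers(1, fbo, 0);
            out.position(0);
            return out;
        }
    }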
private static final String FRAGMENT_SHADER_EXT = - "#extension GL_OES_EGL_image_external : require\n" - + "precision mediump float;\n" - + "varying vec2 vTextureCoord;\n" - + "uniform samplerExternalOES sTexture;\n" - + "void main() {\n" - + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" - + "}\n"; + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; private int muMVPMatrixLoc; private int maPositionLoc; @@ -86,7 +83,7 @@ protected void getLocations() { } @Override - public void drawFrameOnScreen(int textureId, int width, int height, float[] mvpMatrix) { + public void drawFrameOnScreen(int textureId,int width, int height, float[] mvpMatrix) { GlUtil.checkGlError("draw start"); // Select the program. @@ -159,6 +156,7 @@ public int drawFrameOffScreen(int textureId, int width, int height, float[] mvpM GlUtil.checkGlError("glUniformMatrix4fv"); + // Enable the "aPosition" vertex attribute. GLES20.glEnableVertexAttribArray(maPositionLoc); GlUtil.checkGlError("glEnableVertexAttribArray"); @@ -194,28 +192,33 @@ public int drawFrameOffScreen(int textureId, int width, int height, float[] mvpM } - /** - * {en} + /** {zh} + * 读取渲染结果的buffer + * @param width 目标宽度 + * @param height 目标高度 + * @return 渲染结果的像素Buffer 格式RGBA + */ + /** {en} * Read the buffer - * - * @param width target width + * @param width target width * @param height target height * @return pixel Buffer format of the rendered result RGBA */ + @Override public ByteBuffer readBuffer(int textureId, int width, int height) { - if (textureId == GlUtil.NO_TEXTURE) { + if ( textureId == GlUtil.NO_TEXTURE) { return null; } - if (width * height == 0) { - return null; + if (width* height == 0){ + return null; } - ByteBuffer mCaptureBuffer = ByteBuffer.allocateDirect(width * height * 4); + ByteBuffer mCaptureBuffer = ByteBuffer.allocateDirect(width* height*4); mCaptureBuffer.position(0); int[] frameBuffer = new int[1]; - GLES20.glGenFramebuffers(1, frameBuffer, 0); + GLES20.glGenFramebuffers(1,frameBuffer,0); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureYUV.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureYUV.java index 8b68d35ef..14a992368 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureYUV.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/opengl/ProgramTextureYUV.java @@ -41,9 +41,6 @@ public class ProgramTextureYUV extends Program { private int mVTextureLoc; private int mVUTextureLoc; - /** - * Instantiates a new Program texture yuv. - */ public ProgramTextureYUV() { super(VERTEX, FRAGMENT); } @@ -72,17 +69,6 @@ protected void getLocations() { GlUtil.checkLocation(muMVPMatrixLoc, "vuTexture"); } - /** - * Draw frame off screen int. 
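(The ProgramTextureYUV fragment shader in the next hunk bakes the YUV-to-RGB conversion into the fragment stage. The same per-pixel arithmetic on the CPU looks like this; the constants are the shader's own, which are close to (but not exactly) the BT.601 limited-range matrix.)

    final class Yuv2Rgb {
        // Samples are in [0,1]: y is offset by 0.065 (~16/255), u/v centered at 0.5.
        // u corresponds to the shader's yuv.y (weight 1.779 on blue),
        // v to yuv.z (weight 1.4075 on red).
        static float[] toRgb(float ySample, float uSample, float vSample) {
            float y = ySample - 0.065f;
            float u = uSample - 0.5f;
            float v = vSample - 0.5f;
            float r = y + 1.4075f * v;
            float g = y - 0.3455f * u - 0.7169f * v;
            float b = y + 1.779f * u;
            return new float[]{r, g, b};
        }

        public static void main(String[] args) {
            // Mid-gray luma with neutral chroma stays gray: r=g=b=0.5.
            float[] rgb = toRgb(0.565f, 0.5f, 0.5f);
            System.out.printf("r=%.3f g=%.3f b=%.3f%n", rgb[0], rgb[1], rgb[2]);
        }
    }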
- * - * @param yTexture the y texture - * @param uTexture the u texture - * @param vTexture the v texture - * @param width the width - * @param height the height - * @param mvpMatrix the mvp matrix - * @return the int - */ public int drawFrameOffScreen(int yTexture, int uTexture, int vTexture, int width, int height, float[] mvpMatrix) { GlUtil.checkGlError("draw start"); @@ -138,16 +124,6 @@ public int drawFrameOffScreen(int yTexture, int uTexture, int vTexture, int widt return mFrameBufferTextures[0]; } - /** - * Draw frame off screen int. - * - * @param yTexture the y texture - * @param vuTexture the vu texture - * @param width the width - * @param height the height - * @param mvpMatrix the mvp matrix - * @return the int - */ public int drawFrameOffScreen(int yTexture, int vuTexture, int width, int height, float[] mvpMatrix) { GlUtil.checkGlError("draw start"); @@ -212,39 +188,33 @@ public ByteBuffer readBuffer(int textureId, int width, int height) { return null; } - /** - * The constant VERTEX. - */ - public static final String VERTEX = "uniform mat4 uMVPMatrix;\n" - + "attribute vec4 aPosition;\n" - + "attribute vec2 aTextureCoord;\n" - + "varying vec2 vTextureCoord;\n" - + "void main() {\n" - + " gl_Position = uMVPMatrix * aPosition;\n" - + " vTextureCoord = aTextureCoord;\n" - + "}\n"; - /** - * The constant FRAGMENT. - */ - public static final String FRAGMENT = "varying highp vec2 vTextureCoord;\n" - + " uniform sampler2D yTexture;\n" - + " uniform sampler2D vuTexture;\n" - + " uniform sampler2D uTexture;\n" - + " uniform sampler2D vTexture;\n" - + " void main()\n" - + " {\n" - + " mediump vec3 yuv;\n" - + " lowp vec3 rgb;\n" - + " yuv.x = texture2D(yTexture, vTextureCoord).a - 0.065;\n" - + " yuv.y = texture2D(vuTexture, vTextureCoord).a - 0.5;\n" - + " yuv.z = texture2D(vuTexture, vTextureCoord).r - 0.5;\n" -// + " rgb = mat3( 1, 1, 1,\n" -// + " 0, -.21482, 2.12798,\n" -// + " 1.28033, -.38059, 0) * yuv;\n" - + " rgb.x = yuv.x + 1.4075 * yuv.z;\n" - + " rgb.y = yuv.x - 0.3455 * yuv.y - 0.7169 * yuv.z;\n" - + " rgb.z = yuv.x + 1.779 * yuv.y;\n" -// + " gl_FragColor = vec4(rgb.x, rgb.y, rgb.z, 1);\n" - + " gl_FragColor = vec4(rgb.x, rgb.y, rgb.z, 1);\n" - + " }"; + public static final String VERTEX = "uniform mat4 uMVPMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec2 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = aTextureCoord;\n" + + "}\n"; + public static final String FRAGMENT = "varying highp vec2 vTextureCoord;\n" + + " uniform sampler2D yTexture;\n" + + " uniform sampler2D vuTexture;\n" + + " uniform sampler2D uTexture;\n" + + " uniform sampler2D vTexture;\n" + + " void main()\n" + + " {\n" + + " mediump vec3 yuv;\n" + + " lowp vec3 rgb;\n" + + " yuv.x = texture2D(yTexture, vTextureCoord).a - 0.065;\n" + + " yuv.y = texture2D(vuTexture, vTextureCoord).a - 0.5;\n" + + " yuv.z = texture2D(vuTexture, vTextureCoord).r - 0.5;\n" + +// " rgb = mat3( 1, 1, 1,\n" + +// " 0, -.21482, 2.12798,\n" + +// " 1.28033, -.38059, 0) * yuv;\n" + + " rgb.x = yuv.x + 1.4075 * yuv.z;\n" + + " rgb.y = yuv.x - 0.3455 * yuv.y - 0.7169 * yuv.z;\n" + + " rgb.z = yuv.x + 1.779 * yuv.y;\n" + +// " gl_FragColor = vec4(rgb.x, rgb.y, rgb.z, 1);\n" + + " gl_FragColor = vec4(rgb.x, rgb.y, rgb.z, 1);\n" + + " }"; } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt 
b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt index f7a07a85e..1058ea229 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt @@ -31,37 +31,13 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -/** - * Version - */ -const val VERSION = "1.0.3" +const val VERSION = "1.0.6" -/** - * Capture mode - * - * @constructor Create empty Capture mode - */ enum class CaptureMode{ - /** - * Agora - * - * @constructor Create empty Agora - */ Agora, // 使用声网内部的祼数据接口进行处理 - - /** - * Custom - * - * @constructor Create empty Custom - */ Custom // 自定义模式,需要自己调用onFrame接口将原始视频帧传给BeautyAPI做处理 } -/** - * I event callback - * - * @constructor Create empty I event callback - */ interface IEventCallback{ /** @@ -72,83 +48,27 @@ interface IEventCallback{ fun onBeautyStats(stats: BeautyStats) } -/** - * Beauty stats - * - * @property minCostMs - * @property maxCostMs - * @property averageCostMs - * @constructor Create empty Beauty stats - */ data class BeautyStats( val minCostMs:Long, // 统计区间内的最小值 val maxCostMs: Long, // 统计区间内的最大值 val averageCostMs: Long // 统计区间内的平均值 ) -/** - * Mirror mode - * - * @constructor Create empty Mirror mode - */ enum class MirrorMode { // 没有镜像正常画面的定义:前置拍到画面和手机看到画面是左右不一致的,后置拍到画面和手机看到画面是左右一致的 - /** - * Mirror Local Remote - * - * @constructor Create empty Mirror Local Remote - */ MIRROR_LOCAL_REMOTE, //本地远端都镜像,前置默认,本地和远端贴纸都正常 - - /** - * Mirror Local Only - * - * @constructor Create empty Mirror Local Only - */ MIRROR_LOCAL_ONLY, // 仅本地镜像,远端不镜像,,远端贴纸正常,本地贴纸镜像。用于打电话场景,电商直播场景(保证电商直播后面的告示牌文字是正的);这种模式因为本地远端是反的,所以肯定有一边的文字贴纸方向会是反的 - - /** - * Mirror Remote Only - * - * @constructor Create empty Mirror Remote Only - */ MIRROR_REMOTE_ONLY, // 仅远端镜像,本地不镜像,远端贴纸正常,本地贴纸镜像 - - /** - * Mirror None - * - * @constructor Create empty Mirror None - */ MIRROR_NONE // 本地远端都不镜像,后置默认,本地和远端贴纸都正常 } -/** - * Camera config - * - * @property frontMirror - * @property backMirror - * @constructor Create empty Camera config - */ data class CameraConfig( val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // 前置默认镜像:本地远端都镜像 val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // 后置默认镜像:本地远端都不镜像 ) -/** - * Config - * - * @property context - * @property rtcEngine - * @property fuRenderKit - * @property eventCallback - * @property captureMode - * @property statsDuration - * @property statsEnable - * @property cameraConfig - * @constructor Create empty Config - */ data class Config( val context: Context, // Android Context 上下文 val rtcEngine: RtcEngine, // 声网Rtc引擎 @@ -160,103 +80,23 @@ data class Config( val cameraConfig: CameraConfig = CameraConfig() // 摄像头镜像配置 ) -/** - * Error code - * - * @property value - * @constructor Create empty Error code - */ enum class ErrorCode(val value: Int) { - /** - * Error Ok - * - * @constructor Create empty Error Ok - */ ERROR_OK(0), // 一切正常 - - /** - * Error Has Not Initialized - * - * @constructor Create empty Error Has Not Initialized - */ ERROR_HAS_NOT_INITIALIZED(101), // 没有调用Initialize或调用失败情况下调用了其他API - - /** - * Error Has Initialized - * - * @constructor Create empty Error Has Initialized - */ ERROR_HAS_INITIALIZED(102), // 已经Initialize成功后再次调用报错 - - /** - * Error Has Released - * - * @constructor Create empty Error Has Released - */ ERROR_HAS_RELEASED(103), // 已经调用release销毁后还调用其他API - - /** - * Error Process Not Custom - * - * @constructor Create 
empty Error Process Not Custom - */ ERROR_PROCESS_NOT_CUSTOM(104), // 非Custom处理模式下调用onFrame接口从外部传入视频帧 - - /** - * Error Process Disable - * - * @constructor Create empty Error Process Disable - */ - ERROR_PROCESS_DISABLE(105), // 当调用enable(false)禁用美颜后调用onFrame接口返回 - - /** - * Error View Type Error - * - * @constructor Create empty Error View Type Error - */ - ERROR_VIEW_TYPE_ERROR(106), // 当调用setupLocalVideo时view类型错误时返回 - - /** - * Error Frame Skipped - * - * @constructor Create empty Error Frame Skipped - */ - ERROR_FRAME_SKIPPED(107), // 当处理帧忽略时在onFrame返回 + ERROR_VIEW_TYPE_ERROR(105), // 当调用setupLocalVideo时view类型错误时返回 + ERROR_FRAME_SKIPPED(106), // 当处理帧忽略时在onFrame返回 } -/** - * Beauty preset - * - * @constructor Create empty Beauty preset - */ enum class BeautyPreset { - /** - * Custom - * - * @constructor Create empty Custom - */ CUSTOM, // 不使用推荐的美颜参数 - - /** - * Default - * - * @constructor Create empty Default - */ DEFAULT // 默认的 } -/** - * Create face unity beauty a p i - * - * @return - */ fun createFaceUnityBeautyAPI(): FaceUnityBeautyAPI = FaceUnityBeautyAPIImpl() -/** - * Face unity beauty a p i - * - * @constructor Create empty Face unity beauty a p i - */ interface FaceUnityBeautyAPI { /** @@ -311,12 +151,19 @@ interface FaceUnityBeautyAPI { fun isFrontCamera(): Boolean /** - * Get mirror applied + * 获取镜像状态 * - * @return + * @return 镜像状态,true: 镜像,false:非镜像 */ fun getMirrorApplied(): Boolean + /** + * 在处理线程里执行操作 + * + * @param run 操作run + */ + fun runOnProcessThread(run: ()->Unit) + /** * 私参配置,用于不对外api的调用,多用于测试 */ diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt index 84fb33481..dd51417aa 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt @@ -45,10 +45,12 @@ import io.agora.base.VideoFrame import io.agora.base.VideoFrame.I420Buffer import io.agora.base.VideoFrame.SourceType import io.agora.base.VideoFrame.TextureBuffer +import io.agora.base.internal.video.EglBase import io.agora.base.internal.video.YuvHelper import io.agora.beautyapi.faceunity.utils.FuDeviceUtils import io.agora.beautyapi.faceunity.utils.LogUtils import io.agora.beautyapi.faceunity.utils.StatsHelper +import io.agora.beautyapi.faceunity.utils.egl.GLFrameBuffer import io.agora.beautyapi.faceunity.utils.egl.TextureProcessHelper import io.agora.rtc2.Constants import io.agora.rtc2.gl.EglBaseProvider @@ -56,18 +58,15 @@ import io.agora.rtc2.video.IVideoFrameObserver import io.agora.rtc2.video.VideoCanvas import java.io.File import java.nio.ByteBuffer +import java.util.Collections import java.util.concurrent.Callable -/** - * Face unity beauty a p i impl - * - * @constructor Create empty Face unity beauty a p i impl - */ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private val TAG = "FaceUnityBeautyAPIImpl" private val reportId = "scenarioAPI" private val reportCategory = "beauty_android_$VERSION" - private var beautyMode = 0 // 0: 自动根据buffer类型切换,1:固定使用OES纹理,2:固定使用i420,3: 单纹理异步模式(自创) + private var beautyMode = 0 // 0: 自动根据buffer类型切换,1:固定使用OES纹理,2:固定使用i420,3: 单纹理模式 + private var enableTextureAsync = true // 是否开启纹理+异步缓存处理。对于GPU性能好的手机可以减小美颜处理耗时,对于中端机开启后效果也不明显。 private var textureBufferHelper: TextureBufferHelper? = null private var wrapTextureBufferHelper: TextureBufferHelper? 
= null @@ -84,32 +83,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private var statsHelper: StatsHelper? = null private var skipFrame = 0 private enum class ProcessSourceType{ - /** - * Unknown - * - * @constructor Create empty Unknown - */ UNKNOWN, - - /** - * Texture Oes Async - * - * @constructor Create empty Texture Oes Async - */ TEXTURE_OES_ASYNC, - - /** - * Texture 2d Async - * - * @constructor Create empty Texture 2d Async - */ TEXTURE_2D_ASYNC, - - /** - * I420 - * - * @constructor Create empty I420 - */ + TEXTURE_OES, + TEXTURE_2D, I420 } private var currProcessSourceType = ProcessSourceType.UNKNOWN @@ -117,26 +95,22 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private var isFrontCamera = true private var cameraConfig = CameraConfig() private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN + private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) + private val transformGLFrameBuffer = GLFrameBuffer() - /** - * Initialize - * - * @param config - * @return - */ override fun initialize(config: Config): Int { if (this.config != null) { LogUtils.e(TAG, "initialize >> The beauty api has been initialized!") return ErrorCode.ERROR_HAS_INITIALIZED.value } this.config = config + this.cameraConfig = config.cameraConfig if (config.captureMode == CaptureMode.Agora) { config.rtcEngine.registerVideoFrameObserver(this) } statsHelper = StatsHelper(config.statsDuration){ this.config?.eventCallback?.onBeautyStats(it) } - LogUtils.setLogFilePath(config.context.getExternalFilesDir("")?.absolutePath ?: "") LogUtils.i(TAG, "initialize >> config = $config") LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${FURenderKit.getInstance().getVersion()}") @@ -153,12 +127,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Enable - * - * @param enable - * @return - */ override fun enable(enable: Boolean): Int { LogUtils.i(TAG, "enable >> enable = $enable") if (config == null) { @@ -183,13 +151,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Setup local video - * - * @param view - * @param renderMode - * @return - */ override fun setupLocalVideo(view: View, renderMode: Int): Int { val rtcEngine = config?.rtcEngine if(rtcEngine == null){ @@ -208,12 +169,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_VIEW_TYPE_ERROR.value } - /** - * On frame - * - * @param videoFrame - * @return - */ override fun onFrame(videoFrame: VideoFrame): Int { val conf = config if(conf == null){ @@ -228,9 +183,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!") return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value } - if (!enable) { - return ErrorCode.ERROR_PROCESS_DISABLE.value - } if (processBeauty(videoFrame)) { return ErrorCode.ERROR_OK.value } @@ -238,12 +190,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_FRAME_SKIPPED.value } - /** - * Update camera config - * - * @param config - * @return - */ override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) @@ -252,30 +198,33 @@ class FaceUnityBeautyAPIImpl : 
FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Is front camera - * - */ + override fun runOnProcessThread(run: () -> Unit) { + if (config == null) { + LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!") + return + } + if (isReleased) { + LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!") + return + } + if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { + run.invoke() + } else if (textureBufferHelper != null) { + textureBufferHelper?.handler?.post(run) + } else { + pendingProcessRunList.add(run) + } + } + override fun isFrontCamera() = isFrontCamera - /** - * Set parameters - * - * @param key - * @param value - */ override fun setParameters(key: String, value: String) { when(key){ "beauty_mode" -> beautyMode = value.toInt() + "enableTextureAsync" -> enableTextureAsync = value.toBoolean() } } - /** - * Set beauty preset - * - * @param preset - * @return - */ override fun setBeautyPreset(preset: BeautyPreset): Int { val conf = config if(conf == null){ @@ -290,8 +239,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0) - val recommendFaceBeauty = - FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle")) + val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle")) if (preset == BeautyPreset.DEFAULT) { recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1 recommendFaceBeauty.filterIntensity = 0.7 @@ -341,13 +289,9 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Release - * - * @return - */ override fun release(): Int { - val fuRenderer = config?.fuRenderKit + val conf = config + val fuRenderer = conf?.fuRenderKit if(fuRenderer == null){ LogUtils.e(TAG, "release >> The beauty api has not been initialized!") return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value @@ -357,15 +301,20 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_HAS_RELEASED.value } LogUtils.i(TAG, "release") - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) + if (conf.captureMode == CaptureMode.Agora) { + conf.rtcEngine.registerVideoFrameObserver(null) + } + conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) isReleased = true textureBufferHelper?.let { textureBufferHelper = null + it.handler.removeCallbacksAndMessages(null) it.invoke { fuRenderer.release() mTextureProcessHelper?.release() mTextureProcessHelper = null + transformGLFrameBuffer.release() null } // it.handler.looper.quit() @@ -377,6 +326,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } statsHelper?.reset() statsHelper = null + pendingProcessRunList.clear() return ErrorCode.ERROR_OK.value } @@ -459,6 +409,15 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { "FURender", EglBaseProvider.instance().rootEglBase.eglBaseContext ) + textureBufferHelper?.invoke { + synchronized(pendingProcessRunList){ + val iterator = pendingProcessRunList.iterator() + while (iterator.hasNext()){ + iterator.next().invoke() + iterator.remove() + } + } + } LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode") } if (wrapTextureBufferHelper == 
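(runOnProcessThread plus pendingProcessRunList above implement a common dispatch pattern: run inline if already on the GL thread, post if the thread exists, otherwise queue until the texture helper is created and drain the queue there. A Java sketch of that dispatch, with android.os.Handler standing in for the TextureBufferHelper's handler.)

    import android.os.Handler;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    final class ProcessDispatcher {
        private final List<Runnable> pending = Collections.synchronizedList(new ArrayList<>());
        private volatile Handler glHandler; // null until the GL thread exists

        void runOnProcessThread(Runnable task) {
            Handler handler = glHandler;
            if (handler != null
                    && handler.getLooper().getThread() == Thread.currentThread()) {
                task.run();                 // already on the GL thread: run inline
            } else if (handler != null) {
                handler.post(task);         // hop onto the GL thread
            } else {
                pending.add(task);          // GL thread not up yet: defer
            }
        }

        // Called once, on the GL thread, right after it is created
        // (the textureBufferHelper?.invoke { ... } block above does exactly this).
        void drainPending() {
            synchronized (pending) {
                for (Runnable task : pending) {
                    task.run();
                }
                pending.clear();
            }
        }
    }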
null) { @@ -471,7 +430,13 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { val startTime = System.currentTimeMillis() val processTexId = when (beautyMode) { 2 -> processBeautySingleBuffer(videoFrame) - 3 -> processBeautySingleTextureAsync(videoFrame) + 3 -> { + if (enableTextureAsync) { + processBeautySingleTextureAsync(videoFrame) + } else { + processBeautySingleTexture(videoFrame) + } + } else -> processBeautyAuto(videoFrame) } @@ -505,7 +470,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private fun processBeautyAuto(videoFrame: VideoFrame): Int { val buffer = videoFrame.buffer return if (buffer is TextureBuffer) { - processBeautySingleTextureAsync(videoFrame) + if (enableTextureAsync) { + processBeautySingleTextureAsync(videoFrame) + } else { + processBeautySingleTexture(videoFrame) + } } else { processBeautySingleBuffer(videoFrame) } @@ -555,22 +524,25 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { it.cameraFacing = CameraFacingEnum.CAMERA_FRONT it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL it.deviceOrientation = 270 } else { it.cameraFacing = CameraFacingEnum.CAMERA_BACK it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL it.deviceOrientation = 270 } } if (isReleased) { return@setFilter -1 } - return@setFilter textureBufferHelper?.invoke { - return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 - } ?: -1 + val ret = textureBufferHelper?.invoke { + synchronized(EglBase.lock){ + return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + } + } + return@setFilter ret ?: -1 } } @@ -595,6 +567,87 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } + private fun processBeautySingleTexture(videoFrame: VideoFrame): Int { + val texBufferHelper = textureBufferHelper ?: return -1 + val textureBuffer = videoFrame.buffer as? 
TextureBuffer ?: return -1 + + when(textureBuffer.type){ + TextureBuffer.Type.OES -> { + if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){ + LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}") + if (currProcessSourceType != ProcessSourceType.UNKNOWN) { + skipFrame = 3 + } + currProcessSourceType = ProcessSourceType.TEXTURE_OES + return -1 + } + } + else -> { + if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){ + LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}") + if (currProcessSourceType != ProcessSourceType.UNKNOWN) { + skipFrame = 3 + } + currProcessSourceType = ProcessSourceType.TEXTURE_2D + skipFrame = 6 + return -1 + } + } + } + + val width = videoFrame.rotatedWidth + val height = videoFrame.rotatedHeight + val isFront = videoFrame.sourceType == SourceType.kFrontCamera + val rotation = videoFrame.rotation + + return texBufferHelper.invoke { + val fuRenderKit = config?.fuRenderKit ?: return@invoke -1 + + transformGLFrameBuffer.setSize(width, height) + transformGLFrameBuffer.resetTransform() + transformGLFrameBuffer.setTexMatrix(textureBuffer.transformMatrixArray) + transformGLFrameBuffer.setRotation(rotation) + var flipH = isFront + if((isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)){ + flipH = !flipH + } + transformGLFrameBuffer.setFlipH(flipH) + val transformTexId = transformGLFrameBuffer.process( + textureBuffer.textureId, when (textureBuffer.type) { + TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES + else -> GLES20.GL_TEXTURE_2D + } + ) + + val input = FURenderInputData(width, height) + input.texture = FURenderInputData.FUTexture( + FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE, + transformTexId + ) + input.renderConfig.let { + if (isFront) { + it.cameraFacing = CameraFacingEnum.CAMERA_FRONT + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 + it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.deviceOrientation = 270 + } else { + it.cameraFacing = CameraFacingEnum.CAMERA_BACK + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 + it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.deviceOrientation = 270 + } + } + if (isReleased) { + return@invoke -1 + } + synchronized(EglBase.lock){ + return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + } + } + } + private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int { val texBufferHelper = textureBufferHelper ?: return -1 if(currProcessSourceType != ProcessSourceType.I420){ @@ -611,6 +664,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { val height = buffer.height val isFront = videoFrame.sourceType == SourceType.kFrontCamera val mirror = (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror) + val rotation = videoFrame.rotation return texBufferHelper.invoke(Callable { if(isReleased){ @@ -625,13 +679,71 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { input.renderConfig.let { if (isFront) { it.cameraFacing = CameraFacingEnum.CAMERA_FRONT - it.inputBufferMatrix = if(mirror) FUTransformMatrixEnum.CCROT90 else FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL - it.inputTextureMatrix = if(mirror) FUTransformMatrixEnum.CCROT90 else FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL + it.inputBufferMatrix = if(mirror) { 
+ when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0 + 180 -> FUTransformMatrixEnum.CCROT180 + else -> FUTransformMatrixEnum.CCROT90 + } + } else { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL + 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL + } + } + it.inputTextureMatrix = if(mirror) { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0 + 180 -> FUTransformMatrixEnum.CCROT180 + else -> FUTransformMatrixEnum.CCROT90 + } + } else { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL + 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL + } + } + it.deviceOrientation = when(rotation){ + 0 -> 270 + 180 -> 90 + else -> 0 + } it.outputMatrix = FUTransformMatrixEnum.CCROT0 } else { it.cameraFacing = CameraFacingEnum.CAMERA_BACK - it.inputBufferMatrix = if(mirror) FUTransformMatrixEnum.CCROT90_FLIPVERTICAL else FUTransformMatrixEnum.CCROT270 - it.inputTextureMatrix = if(mirror) FUTransformMatrixEnum.CCROT90_FLIPVERTICAL else FUTransformMatrixEnum.CCROT270 + it.inputBufferMatrix = if(mirror) { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL + 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL + } + } else { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0 + 180 -> FUTransformMatrixEnum.CCROT180 + else -> FUTransformMatrixEnum.CCROT270 + } + } + it.inputTextureMatrix = if(mirror) { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL + 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL + } + } else { + when (rotation) { + 0 -> FUTransformMatrixEnum.CCROT0 + 180 -> FUTransformMatrixEnum.CCROT180 + else -> FUTransformMatrixEnum.CCROT270 + } + } + it.deviceOrientation = when(rotation){ + 0 -> 270 + 180 -> 90 + else -> 0 + } it.outputMatrix = FUTransformMatrixEnum.CCROT0 } } @@ -642,7 +754,9 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return@Callable -1 } } - return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + synchronized(EglBase.lock){ + return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + } }) } @@ -676,75 +790,29 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { // IVideoFrameObserver implements - /** - * On capture video frame - * - * @param sourceType - * @param videoFrame - * @return - */ override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean { videoFrame ?: return false return processBeauty(videoFrame) } - /** - * On pre encode video frame - * - * @param sourceType - * @param videoFrame - */ override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?) = false - /** - * On media player video frame - * - * @param videoFrame - * @param mediaPlayerId - */ override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int) = false - /** - * On render video frame - * - * @param channelId - * @param uid - * @param videoFrame - */ override fun onRenderVideoFrame( channelId: String?, uid: Int, videoFrame: VideoFrame? 
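(The processBeautySingleBuffer changes above replace a fixed CCROT90 transform with a table keyed on frame rotation and mirror state. A compact sketch of just the front-camera half of that table; the strings echo the FUTransformMatrixEnum names and the values are this diff's choices, not a general rule.)

    final class FrontCameraTransform {
        // Input buffer/texture matrix per (mirror, rotation), front camera only.
        static String inputMatrix(boolean mirror, int rotation) {
            if (mirror) {
                switch (rotation) {
                    case 0:   return "CCROT0";
                    case 180: return "CCROT180";
                    default:  return "CCROT90";
                }
            }
            switch (rotation) {
                case 0:   return "CCROT0_FLIPHORIZONTAL";
                case 180: return "CCROT0_FLIPVERTICAL";
                default:  return "CCROT90_FLIPHORIZONTAL";
            }
        }

        // deviceOrientation handed to FURenderKit for the same rotations.
        static int deviceOrientation(int rotation) {
            switch (rotation) {
                case 0:   return 270;
                case 180: return 90;
                default:  return 0;
            }
        }

        public static void main(String[] args) {
            System.out.println(inputMatrix(true, 0) + " / " + deviceOrientation(0));    // CCROT0 / 270
            System.out.println(inputMatrix(false, 90) + " / " + deviceOrientation(90)); // CCROT90_FLIPHORIZONTAL / 0
        }
    }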
) = false - /** - * Get video frame process mode - * - */ override fun getVideoFrameProcessMode() = IVideoFrameObserver.PROCESS_MODE_READ_WRITE - /** - * Get video format preference - * - */ override fun getVideoFormatPreference() = IVideoFrameObserver.VIDEO_PIXEL_DEFAULT - /** - * Get rotation applied - * - */ override fun getRotationApplied() = false - /** - * Get mirror applied - * - */ override fun getMirrorApplied() = captureMirror && !enable - /** - * Get observed frame position - * - */ override fun getObservedFramePosition() = IVideoFrameObserver.POSITION_POST_CAPTURER } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java index 8e7397963..5e03a313c 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java @@ -40,31 +40,12 @@ import java.io.InputStream; import java.io.InputStreamReader; -/** - * The type Fu device utils. - */ -public final class FuDeviceUtils { - - private FuDeviceUtils() { - - } +public class FuDeviceUtils { - /** - * The constant TAG. - */ public static final String TAG = "FuDeviceUtils"; - /** - * The constant DEVICE_LEVEL_HIGH. - */ public static final int DEVICE_LEVEL_HIGH = 2; - /** - * The constant DEVICE_LEVEL_MID. - */ public static final int DEVICE_LEVEL_MID = 1; - /** - * The constant DEVICE_LEVEL_LOW. - */ public static final int DEVICE_LEVEL_LOW = 0; /** @@ -167,9 +148,7 @@ public static int getCPUMaxFreqKHz() { try { int freqBound = parseFileForValue("cpu MHz", stream); freqBound *= 1024; //MHz -> kHz - if (freqBound > maxFreq) { - maxFreq = freqBound; - } + if (freqBound > maxFreq) maxFreq = freqBound; } finally { stream.close(); } @@ -266,9 +245,7 @@ private static int parseFileForValue(String textToMatch, FileInputStream stream) int length = stream.read(buffer); for (int i = 0; i < length; i++) { if (buffer[i] == '\n' || i == 0) { - if (buffer[i] == '\n') { - i++; - } + if (buffer[i] == '\n') i++; for (int j = i; j < length; j++) { int textIndex = j - i; //Text doesn't match query at some point. @@ -293,7 +270,6 @@ private static int parseFileForValue(String textToMatch, FileInputStream stream) * Helper method used by {@link #parseFileForValue(String, FileInputStream) parseFileForValue}. Parses * the next available number after the match in the file being read and returns it as an integer. * - * @param buffer Buffer. * @param index - The index in the buffer array to begin looking. * @return The next number on that line in the buffer, returned as an int. Returns * DEVICEINFO_UNKNOWN = -1 in the event that no more numbers exist on the same line. 
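(FuDeviceUtils' parseFileForValue, used in the hunks around here, scans /proc pseudo-files byte by byte for a key and extracts the number after it. A shorter line-based sketch of the same probe; the fractional-MHz handling is an assumption about typical /proc/cpuinfo contents, not code from this repository.)

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    final class CpuInfoProbe {
        // Scan /proc/cpuinfo and pull the first integer after "cpu MHz".
        static int readMHz() {
            try (BufferedReader reader = new BufferedReader(new FileReader("/proc/cpuinfo"))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    if (line.startsWith("cpu MHz")) {
                        String value = line.substring(line.indexOf(':') + 1).trim();
                        // The field can be fractional ("1800.000"); truncate to int MHz.
                        return (int) Float.parseFloat(value);
                    }
                }
            } catch (IOException | NumberFormatException ignored) {
                // fall through to the unknown sentinel
            }
            return -1; // DEVICEINFO_UNKNOWN-style sentinel, as in FuDeviceUtils
        }

        public static void main(String[] args) {
            System.out.println("cpu MHz = " + readMHz());
        }
    }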
@@ -317,8 +293,8 @@ private static int extractValue(byte[] buffer, int index) { /** * 获取当前剩余内存(ram) * - * @param context the context - * @return avail memory + * @param context + * @return */ public static long getAvailMemory(Context context) { ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); @@ -330,7 +306,7 @@ public static long getAvailMemory(Context context) { /** * 获取厂商信息 * - * @return brand + * @return */ public static String getBrand() { return Build.BRAND; @@ -339,7 +315,7 @@ public static String getBrand() { /** * 获取手机机型 * - * @return model + * @return */ public static String getModel() { return Build.MODEL; @@ -348,7 +324,7 @@ public static String getModel() { /** * 获取硬件信息(cpu型号) * - * @return hard ware + * @return */ public static String getHardWare() { try { @@ -377,15 +353,13 @@ public static String getHardWare() { * Level judgement based on current memory and CPU. * * @param context - Context object. - * @return int + * @return */ public static int judgeDeviceLevel(Context context) { int level; //有一些设备不符合下述的判断规则,则走一个机型判断模式 int specialDevice = judgeDeviceLevelInDeviceName(); - if (specialDevice >= 0) { - return specialDevice; - } + if (specialDevice >= 0) return specialDevice; int ramLevel = judgeMemory(context); int cpuLevel = judgeCPU(); @@ -398,30 +372,29 @@ public static int judgeDeviceLevel(Context context) { level = DEVICE_LEVEL_MID; } } - LogUtils.d(TAG, "DeviceLevel: " + level); + LogUtils.d(TAG,"DeviceLevel: " + level); return level; } /** * -1 不是特定的高低端机型 - * - * @return level. + * @return */ private static int judgeDeviceLevelInDeviceName() { String currentDeviceName = getDeviceName(); - for (String deviceName : UPSCALE_DEVICE) { + for (String deviceName:upscaleDevice) { if (deviceName.equals(currentDeviceName)) { return DEVICE_LEVEL_HIGH; } } - for (String deviceName : MIDDLE_DEVICES) { + for (String deviceName:middleDevice) { if (deviceName.equals(currentDeviceName)) { return DEVICE_LEVEL_MID; } } - for (String deviceName : LOW_DEVICES) { + for (String deviceName:lowDevice) { if (deviceName.equals(currentDeviceName)) { return DEVICE_LEVEL_LOW; } @@ -429,24 +402,14 @@ private static int judgeDeviceLevelInDeviceName() { return -1; } - /** - * The constant upscaleDevice. - */ - public static final String[] UPSCALE_DEVICE = {"vivo X6S A", "MHA-AL00", "VKY-AL00", "V1838A"}; - /** - * The constant lowDevice. - */ - public static final String[] LOW_DEVICES = {}; - /** - * The constant middleDevice. - */ - public static final String[] MIDDLE_DEVICES = {"OPPO R11s", "PAR-AL00", "MI 8 Lite", "ONEPLUS A6000", "PRO 6", "PRO 7 Plus"}; + public static final String[] upscaleDevice = {"vivo X6S A","MHA-AL00","VKY-AL00","V1838A"}; + public static final String[] lowDevice = {}; + public static final String[] middleDevice = {"OPPO R11s","PAR-AL00","MI 8 Lite","ONEPLUS A6000","PRO 6","PRO 7 Plus"}; /** * 评定内存的等级. * - * @param context Context. - * @return level. + * @return */ private static int judgeMemory(Context context) { long ramMB = getTotalMemory(context) / (1024 * 1024); @@ -468,7 +431,7 @@ private static int judgeMemory(Context context) { /** * 评定CPU等级.(按频率和厂商型号综合判断) * - * @return level. 
+ * @return */ private static int judgeCPU() { int level = 0; @@ -482,8 +445,7 @@ private static int judgeCPU() { return judgeQualcommCPU(cpuName, freqMHz); } else if (cpuName.contains("hi") || cpuName.contains("kirin")) { //海思麒麟 return judgeSkinCPU(cpuName, freqMHz); - } else if (cpuName.contains("MT")) { - //联发科 + } else if (cpuName.contains("MT")) {//联发科 return judgeMTCPU(cpuName, freqMHz); } } @@ -504,9 +466,7 @@ private static int judgeCPU() { /** * 联发科芯片等级判定 * - * @param cpuName CPU Name. - * @param freqMHz CPU Freq MHz. - * @return level + * @return */ private static int judgeMTCPU(String cpuName, int freqMHz) { //P60之前的全是低端机 MT6771V/C @@ -548,8 +508,8 @@ private static int judgeMTCPU(String cpuName, int freqMHz) { /** * 通过联发科CPU型号定义 -> 获取cpu version * - * @param cpuName CPU Name. - * @return CPU Version. + * @param cpuName + * @return */ private static int getMTCPUVersion(String cpuName) { //截取MT后面的四位数字 @@ -569,9 +529,7 @@ private static int getMTCPUVersion(String cpuName) { /** * 高通骁龙芯片等级判定 * - * @param cpuName CPU Name. - * @param freqMHz CPU Freq MHz. - * @return level + * @return */ private static int judgeQualcommCPU(String cpuName, int freqMHz) { int level = 0; @@ -603,9 +561,8 @@ private static int judgeQualcommCPU(String cpuName, int freqMHz) { /** * 麒麟芯片等级判定 * - * @param cpuName CPU Name. - * @param freqMHz CPU Freq MHz. - * @return level + * @param freqMHz + * @return */ private static int judgeSkinCPU(String cpuName, int freqMHz) { //型号 -> kirin710之后 & 最高核心频率 @@ -633,22 +590,17 @@ private static int judgeSkinCPU(String cpuName, int freqMHz) { return level; } - /** - * The constant NEXUS_6P. - */ - public static final String NEXUS_6P = "Nexus 6P"; + public static final String Nexus_6P = "Nexus 6P"; /** - * 获取设备名。 + * 获取设备名 * - * @return the device name + * @return */ public static String getDeviceName() { String deviceName = ""; - if (Build.MODEL != null) { - deviceName = Build.MODEL; - } - LogUtils.e(TAG, "deviceName: " + deviceName); + if (Build.MODEL != null) deviceName = Build.MODEL; + LogUtils.e(TAG,"deviceName: " + deviceName); return deviceName; } } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt index 4722d73a7..4c1a5252d 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt @@ -24,118 +24,34 @@ package io.agora.beautyapi.faceunity.utils -import android.util.Log -import java.io.File -import java.io.FileOutputStream -import java.text.SimpleDateFormat -import java.util.Date -import java.util.Locale -import java.util.concurrent.Executors +import io.agora.base.internal.Logging -/** - * Log utils - * - * @constructor Create empty Log utils - */ object LogUtils { private const val beautyType = "FaceUnity" - private val timeFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.ROOT) - private val logFileName = "agora_beautyapi_${beautyType.toLowerCase(Locale.US)}_android.log" - private val workerThread = Executors.newSingleThreadExecutor() - private var logOutputStream: FileOutputStream? 
= null - /** - * Set log file path - * - * @param path - */ - @JvmStatic - fun setLogFilePath(path: String){ - if(path.isEmpty()){ - e("LogUtils", "setLogFilePath >> path is empty!") - return - } - val direction = File(path) - if(!direction.exists()){ - direction.mkdirs() - } - val file = File(direction, logFileName) - if(!file.exists()){ - file.createNewFile() - } - val append = file.length() < 2 * 1024 * 1024 - logOutputStream = FileOutputStream(file, append) - } - - /** - * I - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun i(tag: String, content: String, vararg args: Any) { val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][INFO] : ${String.format(content, args)}" - Log.v(tag, consoleMessage) - saveToFile(fileMessage) + Logging.log(Logging.Severity.LS_INFO, tag, consoleMessage) } - /** - * D - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun d(tag: String, content: String, vararg args: Any) { val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][DEBUG] : ${String.format(content, args)}" - Log.d(tag, consoleMessage) - saveToFile(fileMessage) + Logging.d(tag, consoleMessage) } - /** - * W - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun w(tag: String, content: String, vararg args: Any){ val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][WARN] : ${String.format(content, args)}" - Log.w(tag, consoleMessage) - saveToFile(fileMessage) + Logging.w(tag, consoleMessage) } - /** - * E - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun e(tag: String, content: String, vararg args: Any){ val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][ERROR] : ${String.format(content, args)}" - Log.e(tag, consoleMessage) - saveToFile(fileMessage) + Logging.e(tag, consoleMessage) } - - private fun saveToFile(message: String){ - val outputStream = logOutputStream ?: return - workerThread.execute { - outputStream.write(message.toByteArray()) - if(!message.endsWith("\n")){ - outputStream.write("\n".toByteArray()) - } - } - } } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt index 6f2dacf46..cb4cf1292 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt @@ -30,13 +30,6 @@ import io.agora.beautyapi.faceunity.BeautyStats import kotlin.math.max import kotlin.math.min -/** - * Stats helper - * - * @property statsDuration - * @property onStatsChanged - * @constructor Create empty Stats helper - */ class StatsHelper( private val statsDuration: Long, private val onStatsChanged: (BeautyStats) -> Unit @@ -48,11 +41,6 @@ class StatsHelper( private var mCostMax = 0L private var mCostMin = Long.MAX_VALUE - /** - * Once - * - * @param cost - */ fun once(cost: Long) { val curr = System.currentTimeMillis() if (mStartTime == 0L) { @@ -80,10 +68,6 @@ class StatsHelper( mCostMin = 
min(mCostMin, cost) } - /** - * Reset - * - */ fun reset() { mMainHandler.removeCallbacksAndMessages(null) mStartTime = 0 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java index b3717d609..97b3c7a53 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java @@ -36,9 +36,6 @@ import io.agora.beautyapi.faceunity.utils.LogUtils; -/** - * The type Egl context helper. - */ public class EGLContextHelper { private static final String DEBUG_TAG = "EGLContextManager"; private final int mRedSize = 8; @@ -48,23 +45,12 @@ public class EGLContextHelper { private final int mDepthSize = 16; private final int mStencilSize = 0; private final int mRenderType = 4; + public EGLContextHelper(){} - /** - * Instantiates a new Egl context helper. - */ - public EGLContextHelper() { - } - - /** - * Init egl. - * - * @param shareContext the share context - * @throws Exception the exception - */ public void initEGL(EGLContext shareContext) throws Exception { mEGL = (EGL10) GLDebugHelper.wrap(EGLContext.getEGL(), GLDebugHelper.CONFIG_CHECK_GL_ERROR - | GLDebugHelper.CONFIG_CHECK_THREAD, null); + | GLDebugHelper.CONFIG_CHECK_THREAD, null); if (mEGL == null) { throw new Exception("Couldn't get EGL"); @@ -83,8 +69,8 @@ public void initEGL(EGLContext shareContext) throws Exception { + curGLVersion[1]); int[] num_config = new int[1]; - if (!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, null, 1, - num_config)) { + if(!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, null, 1, + num_config)){ throw new IllegalArgumentException("eglChooseConfig failed"); } int numConfigs = num_config[0]; @@ -129,75 +115,32 @@ public void initEGL(EGLContext shareContext) throws Exception { } - /** - * Gets egl context. - * - * @return the egl context - */ public EGLContext getEGLContext() { return mGLContext; } - /** - * Gets gl display. - * - * @return the gl display - */ public EGLDisplay getGLDisplay() { return mGLDisplay; } - /** - * Gets gl config. - * - * @return the gl config - */ public EGLConfig getGLConfig() { return mGLConfig; } - /** - * Gets gl surface. - * - * @return the gl surface - */ public EGLSurface getGLSurface() { return mGLSurface; } - /** - * Gets egl. - * - * @return the egl - */ public EGL10 getEGL() { return mEGL; } - /** - * The M egl. - */ EGL10 mEGL; - /** - * The M gl display. - */ EGLDisplay mGLDisplay; - /** - * The M gl config. - */ EGLConfig mGLConfig; - /** - * The M gl surface. - */ EGLSurface mGLSurface; - /** - * The M gl context. - */ EGLContext mGLContext; - /** - * The M config spec. - */ int[] mConfigSpec = new int[]{ EGL10.EGL_RED_SIZE, mRedSize, EGL10.EGL_GREEN_SIZE, mGreenSize, @@ -205,12 +148,9 @@ public EGL10 getEGL() { EGL10.EGL_ALPHA_SIZE, mAlphaSize, EGL10.EGL_DEPTH_SIZE, mDepthSize, EGL10.EGL_STENCIL_SIZE, mStencilSize, - EGL10.EGL_RENDERABLE_TYPE, mRenderType, //egl版本 2.0 + EGL10.EGL_RENDERABLE_TYPE, mRenderType,//egl版本 2.0 EGL10.EGL_NONE}; - /** - * Release. - */ public void release() { mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); @@ -221,25 +161,15 @@ public void release() { LogUtils.i(DEBUG_TAG, "GL Cleaned up"); } - /** - * Egl make current boolean. 
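EGLContextHelper's release() above detaches the context from the thread before destroying it, and the next hunk compacts the matching make-current guard. The pair against the raw EGL10 API, as standalone helpers rather than the patched class:

import javax.microedition.khronos.egl.EGL10
import javax.microedition.khronos.egl.EGLContext
import javax.microedition.khronos.egl.EGLDisplay
import javax.microedition.khronos.egl.EGLSurface

// Bind the offscreen surface/context, refusing when no context exists.
fun makeCurrent(egl: EGL10, display: EGLDisplay, surface: EGLSurface, ctx: EGLContext): Boolean =
    if (ctx == EGL10.EGL_NO_CONTEXT) false
    else egl.eglMakeCurrent(display, surface, surface, ctx)

// Detach everything from the calling thread (the first step of release()).
fun makeNoCurrent(egl: EGL10, display: EGLDisplay): Boolean =
    egl.eglMakeCurrent(display, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)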
- * - * @return the boolean - */ - public boolean eglMakeCurrent() { - if (mGLContext == EGL10.EGL_NO_CONTEXT) { + public boolean eglMakeCurrent(){ + if(mGLContext == EGL10.EGL_NO_CONTEXT){ return false; - } else { + }else{ return mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext); } } - /** - * Egl make no current boolean. - * - * @return the boolean - */ - public boolean eglMakeNoCurrent() { + public boolean eglMakeNoCurrent(){ return mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); } @@ -251,7 +181,7 @@ private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGL10.EGL_DEPTH_SIZE, 0); int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0); - if (d >= mDepthSize && s >= mStencilSize) { + if ((d >= mDepthSize) && (s >= mStencilSize)) { int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0); int g = findConfigAttrib(egl, display, config, @@ -260,8 +190,8 @@ private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGL10.EGL_BLUE_SIZE, 0); int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0); - if (r == mRedSize && g == mGreenSize - && b == mBlueSize && a == mAlphaSize) { + if ((r == mRedSize) && (g == mGreenSize) + && (b == mBlueSize) && (a == mAlphaSize)) { return config; } } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java index 6f92d1474..b475f39d9 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java @@ -28,51 +28,31 @@ import android.opengl.GLES20; import android.opengl.GLES30; -/** - * The type Gl copy helper. - */ public class GLCopyHelper { private final int bufferCount; - /** - * Instantiates a new Gl copy helper. - */ - public GLCopyHelper() { + public GLCopyHelper(){ this(1); } - /** - * Instantiates a new Gl copy helper. - * - * @param bufferCount the buffer count - */ - public GLCopyHelper(int bufferCount) { + public GLCopyHelper(int bufferCount){ this.bufferCount = bufferCount; } private int[] mDstFrameBuffer; private int[] mSrcFrameBuffer; - /** - * Copy 2 d texture to oes texture. - * - * @param srcTexture the src texture - * @param dstTexture the dst texture - * @param width the width - * @param height the height - * @param index the index - */ public void copy2DTextureToOesTexture( int srcTexture, int dstTexture, int width, int height, - int index) { - if (mDstFrameBuffer == null) { + int index){ + if(mDstFrameBuffer == null){ mDstFrameBuffer = new int[bufferCount]; GLES20.glGenFramebuffers(bufferCount, mDstFrameBuffer, 0); } - if (mSrcFrameBuffer == null) { + if(mSrcFrameBuffer == null){ mSrcFrameBuffer = new int[bufferCount]; GLES20.glGenFramebuffers(bufferCount, mSrcFrameBuffer, 0); } @@ -90,16 +70,13 @@ public void copy2DTextureToOesTexture( GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); } - /** - * Release. 
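copy2DTextureToOesTexture() above reads from one framebuffer and writes into another; with GLES30 that is typically a glBlitFramebuffer between the two attachments. A hedged sketch of that technique only: attaching an external OES texture to a framebuffer is driver-dependent, and the FBO ids are assumed to be pre-generated the way the helper generates them.

import android.opengl.GLES11Ext
import android.opengl.GLES20
import android.opengl.GLES30

fun blit2DToOes(srcTex: Int, dstTex: Int, width: Int, height: Int, srcFbo: Int, dstFbo: Int) {
    // Source: the processed 2D texture.
    GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, srcFbo)
    GLES30.glFramebufferTexture2D(GLES30.GL_READ_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, srcTex, 0)
    // Destination: the OES texture consumed by the camera pipeline.
    GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, dstFbo)
    GLES30.glFramebufferTexture2D(GLES30.GL_DRAW_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
        GLES11Ext.GL_TEXTURE_EXTERNAL_OES, dstTex, 0)
    GLES30.glBlitFramebuffer(0, 0, width, height, 0, 0, width, height,
        GLES20.GL_COLOR_BUFFER_BIT, GLES20.GL_NEAREST)
    GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, 0)
    GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, 0)
}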
- */ - public void release() { - if (mDstFrameBuffer != null) { + public void release(){ + if(mDstFrameBuffer != null){ GLES20.glDeleteFramebuffers(mDstFrameBuffer.length, mDstFrameBuffer, 0); mDstFrameBuffer = null; } - if (mSrcFrameBuffer != null) { + if(mSrcFrameBuffer != null){ GLES20.glDeleteFramebuffers(mSrcFrameBuffer.length, mSrcFrameBuffer, 0); mSrcFrameBuffer = null; } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java index 4372c0700..e30f17ac3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java @@ -4,12 +4,10 @@ import android.opengl.GLES11Ext; import android.opengl.GLES20; +import io.agora.base.internal.video.EglBase; import io.agora.base.internal.video.GlRectDrawer; import io.agora.base.internal.video.RendererCommon; -/** - * The type Gl frame buffer. - */ public class GLFrameBuffer { private int mFramebufferId = -1; @@ -21,20 +19,10 @@ public class GLFrameBuffer { private float[] mTexMatrix = GLUtils.IDENTITY_MATRIX; - /** - * Instantiates a new Gl frame buffer. - */ public GLFrameBuffer() { } - /** - * Sets size. - * - * @param width the width - * @param height the height - * @return the size - */ public boolean setSize(int width, int height) { if (mWidth != width || mHeight != height) { mWidth = width; @@ -45,66 +33,36 @@ public boolean setSize(int width, int height) { return false; } - /** - * Sets rotation. - * - * @param rotation the rotation - */ public void setRotation(int rotation) { if (mRotation != rotation) { mRotation = rotation; } } - /** - * Sets flip v. - * - * @param flipV the flip v - */ public void setFlipV(boolean flipV) { if (isFlipV != flipV) { isFlipV = flipV; } } - /** - * Sets flip h. - * - * @param flipH the flip h - */ public void setFlipH(boolean flipH) { if (isFlipH != flipH) { isFlipH = flipH; } } - /** - * Sets texture id. - * - * @param textureId the texture id - */ - public void setTextureId(int textureId) { - if (mTextureId != textureId) { + public void setTextureId(int textureId){ + if(mTextureId != textureId){ deleteTexture(); mTextureId = textureId; isTextureChanged = true; } } - /** - * Gets texture id. - * - * @return the texture id - */ - public int getTextureId() { + public int getTextureId(){ return mTextureId; } - /** - * Sets tex matrix. - * - * @param matrix the matrix - */ public void setTexMatrix(float[] matrix) { if (matrix != null) { mTexMatrix = matrix; @@ -113,43 +71,32 @@ public void setTexMatrix(float[] matrix) { } } - /** - * Reset transform. - */ - public void resetTransform() { + public void resetTransform(){ mTexMatrix = GLUtils.IDENTITY_MATRIX; - isFlipH = false; - isFlipV = false; + isFlipH = isFlipV = false; mRotation = 0; } - /** - * Process int. 
- * - * @param textureId the texture id - * @param textureType the texture type - * @return the int - */ public int process(int textureId, int textureType) { if (mWidth <= 0 && mHeight <= 0) { throw new RuntimeException("setSize firstly!"); } - if (mTextureId == -1) { + if(mTextureId == -1){ mTextureId = createTexture(mWidth, mHeight); bindFramebuffer(mTextureId); isTextureInner = true; - } else if (isTextureInner && isSizeChanged) { + }else if(isTextureInner && isSizeChanged){ GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0); mTextureId = createTexture(mWidth, mHeight); bindFramebuffer(mTextureId); - } else if (isTextureChanged) { + }else if(isTextureChanged){ bindFramebuffer(mTextureId); } isTextureChanged = false; isSizeChanged = false; - if (drawer == null) { + if(drawer == null){ drawer = new GlRectDrawer(); } @@ -160,31 +107,31 @@ public int process(int textureId, int textureType) { transform.preTranslate(0.5f, 0.5f); transform.preRotate(mRotation, 0.f, 0.f); transform.preScale( - isFlipH ? -1.f : 1.f, - isFlipV ? -1.f : 1.f + isFlipH ? -1.f: 1.f, + isFlipV ? -1.f: 1.f ); transform.preTranslate(-0.5f, -0.5f); float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform); - if (textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES) { - drawer.drawOes(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); - } else { - drawer.drawRgb(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); + synchronized (EglBase.lock){ + if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){ + drawer.drawOes(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); + }else{ + drawer.drawRgb(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); + } } - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE); GLES20.glFinish(); return mTextureId; } - /** - * Release. - */ - public void release() { + public void release(){ deleteTexture(); deleteFramebuffer(); - if (drawer != null) { + if(drawer != null){ drawer.release(); drawer = null; } @@ -198,14 +145,7 @@ private void deleteFramebuffer() { } } - /** - * Create texture int. - * - * @param width the width - * @param height the height - * @return the int - */ - public int createTexture(int width, int height) { + public int createTexture(int width, int height){ int[] textures = new int[1]; GLES20.glGenTextures(1, textures, 0); GLUtils.checkGlError("glGenTextures"); @@ -229,13 +169,6 @@ public int createTexture(int width, int height) { return textureId; } - /** - * Resize texture. 
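The process() hunk above now serializes the draw behind synchronized(EglBase.lock) and builds its flip/rotation transform around the texture centre so mirroring never translates the image. A sketch of that matrix construction, assuming io.agora.base.internal.video.RendererCommon mirrors WebRTC's to/from matrix converter pair:

import io.agora.base.internal.video.RendererCommon

fun buildTexTransform(texMatrix: FloatArray, rotation: Int, flipH: Boolean, flipV: Boolean): FloatArray {
    val transform = RendererCommon.convertMatrixToAndroidGraphicsMatrix(texMatrix)
    transform.preTranslate(0.5f, 0.5f)            // move pivot to texture centre
    transform.preRotate(rotation.toFloat(), 0f, 0f)
    transform.preScale(
        if (flipH) -1f else 1f,                   // horizontal mirror
        if (flipV) -1f else 1f                    // vertical mirror
    )
    transform.preTranslate(-0.5f, -0.5f)          // move pivot back
    return RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform)
}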
- * - * @param textureId the texture id - * @param width the width - * @param height the height - */ public void resizeTexture(int textureId, int width, int height) { GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, @@ -252,7 +185,7 @@ private void deleteTexture() { } private void bindFramebuffer(int textureId) { - if (mFramebufferId == -1) { + if(mFramebufferId == -1){ int[] framebuffers = new int[1]; GLES20.glGenFramebuffers(1, framebuffers, 0); GLUtils.checkGlError("glGenFramebuffers"); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt index 2b9ca20b1..c8d193f8f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt @@ -29,14 +29,6 @@ import android.util.Log import android.util.Size import java.util.concurrent.ConcurrentLinkedQueue -/** - * G l texture buffer queue - * - * @property glFrameBuffer - * @property cacheCount - * @property loggable - * @constructor Create empty G l texture buffer queue - */ class GLTextureBufferQueue( private val glFrameBuffer: GLFrameBuffer = GLFrameBuffer(), private val cacheCount: Int = 6, @@ -49,12 +41,6 @@ class GLTextureBufferQueue( private val textureIdQueue = ConcurrentLinkedQueue() - /** - * Enqueue - * - * @param iN - * @return - */ fun enqueue(iN: TextureIn): Int { var size = textureIdQueue.size if (size < cacheCount) { @@ -73,7 +59,7 @@ class GLTextureBufferQueue( outSize.width, outSize.height, iN.isFrontCamera, - iN.isMirror + iN.isMirror, ) cacheTextureOuts[cacheIndex] = out } else if (out.width != outSize.width || out.height != outSize.height) { @@ -85,7 +71,7 @@ class GLTextureBufferQueue( outSize.width, outSize.height, iN.isFrontCamera, - iN.isMirror + iN.isMirror, ) cacheTextureOuts[cacheIndex] = out } else if(out.isFrontCamera != iN.isFrontCamera){ @@ -96,7 +82,7 @@ class GLTextureBufferQueue( out.width, out.height, iN.isFrontCamera, - iN.isMirror + iN.isMirror, ) cacheTextureOuts[cacheIndex] = out } @@ -121,7 +107,6 @@ class GLTextureBufferQueue( } glFrameBuffer.setFlipV(iN.flipV) glFrameBuffer.process(iN.textureId, iN.textureType) - GLES20.glFinish() out.index = cacheIndex out.tag = iN.tag textureIdQueue.offer(out) @@ -140,34 +125,24 @@ class GLTextureBufferQueue( return size } - /** - * Dequeue - * - * @return - */ - fun dequeue(): TextureOut? { + fun dequeue(remove: Boolean = true): TextureOut? 
{ val size = textureIdQueue.size - val poll = textureIdQueue.poll() + val poll = if(remove){ + textureIdQueue.poll() + }else{ + textureIdQueue.peek() + } if(loggable){ Log.d(TAG, "TextureIdQueue dequeue index=${poll?.index}, size=$size") } - return poll } - /** - * Reset - * - */ fun reset() { cacheIndex = 0 textureIdQueue.clear() } - /** - * Release - * - */ fun release() { cacheIndex = 0 cacheTextureOuts.forEachIndexed { index, textureOut -> @@ -180,21 +155,6 @@ class GLTextureBufferQueue( glFrameBuffer.release() } - /** - * Texture in - * - * @property textureId - * @property textureType - * @property width - * @property height - * @property rotation - * @property flipV - * @property isFrontCamera - * @property isMirror - * @property transform - * @property tag - * @constructor Create empty Texture in - */ data class TextureIn( val textureId: Int, val textureType: Int, @@ -208,18 +168,6 @@ class GLTextureBufferQueue( val tag: Any? = null ) - /** - * Texture out - * - * @property index - * @property textureId - * @property textureType - * @property width - * @property height - * @property isFrontCamera - * @property tag - * @constructor Create empty Texture out - */ data class TextureOut( var index: Int = 0, val textureId: Int, diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java index 887da3cc6..071587426 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java @@ -44,14 +44,8 @@ import io.agora.beautyapi.faceunity.utils.LogUtils; -/** - * The type Gl utils. - */ -public final class GLUtils { +public class GLUtils { private static final String TAG = "GLUtils"; - /** - * The constant IDENTITY_MATRIX. - */ public static final float[] IDENTITY_MATRIX = new float[16]; static { @@ -61,14 +55,6 @@ public final class GLUtils { private GLUtils() { } - /** - * Gets texture 2 d image. - * - * @param textureID the texture id - * @param width the width - * @param height the height - * @return the texture 2 d image - */ public static Bitmap getTexture2DImage(int textureID, int width, int height) { try { int[] oldFboId = new int[1]; @@ -110,14 +96,6 @@ public static Bitmap getTexture2DImage(int textureID, int width, int height) { return null; } - /** - * Gets texture oes image. - * - * @param textureID the texture id - * @param width the width - * @param height the height - * @return the texture oes image - */ public static Bitmap getTextureOESImage(int textureID, int width, int height) { try { int[] oldFboId = new int[1]; @@ -159,14 +137,6 @@ public static Bitmap getTextureOESImage(int textureID, int width, int height) { return null; } - /** - * Nv 21 to bitmap bitmap. - * - * @param nv21 the nv 21 - * @param width the width - * @param height the height - * @return the bitmap - */ public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) { Bitmap bitmap = null; try { @@ -191,14 +161,6 @@ private static Bitmap readBitmap(int width, int height) { return bitmap; } - /** - * Create transform matrix float [ ]. 
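The new dequeue(remove: Boolean) above separates consuming the head frame from merely inspecting it, which is what lets TextureProcessHelper read a queued frame, run the filter, and only discard the frame once processing succeeds. The pattern in isolation:

import java.util.concurrent.ConcurrentLinkedQueue

// remove = true consumes the head (poll); remove = false only observes it
// (peek), leaving the element in place for a later, confirmed dequeue.
fun <T> dequeue(queue: ConcurrentLinkedQueue<T>, remove: Boolean = true): T? =
    if (remove) queue.poll() else queue.peek()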
- * - * @param rotation the rotation - * @param flipH the flip h - * @param flipV the flip v - * @return the float [ ] - */ public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV) { float[] renderMVPMatrix = new float[16]; float[] tmp = new float[16]; @@ -231,11 +193,6 @@ public static float[] createTransformMatrix(int rotation, boolean flipH, boolean return renderMVPMatrix; } - /** - * Gets curr gl context. - * - * @return the curr gl context - */ public static EGLContext getCurrGLContext() { EGL10 egl = (EGL10) EGLContext.getEGL(); if (egl != null && !Objects.equals(egl.eglGetCurrentContext(), EGL10.EGL_NO_CONTEXT)) { @@ -244,11 +201,6 @@ public static EGLContext getCurrGLContext() { return null; } - /** - * Check gl error. - * - * @param op the op - */ public static void checkGlError(String op) { int error = GLES20.glGetError(); if (error != GLES20.GL_NO_ERROR) { @@ -258,13 +210,6 @@ public static void checkGlError(String op) { } } - /** - * Create program int. - * - * @param vertexSource the vertex source - * @param fragmentSource the fragment source - * @return the int - */ public static int createProgram(String vertexSource, String fragmentSource) { int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); if (vertexShader == 0) { @@ -295,13 +240,6 @@ public static int createProgram(String vertexSource, String fragmentSource) { return program; } - /** - * Load shader int. - * - * @param shaderType the shader type - * @param source the source - * @return the int - */ public static int loadShader(int shaderType, String source) { int shader = GLES20.glCreateShader(shaderType); checkGlError("glCreateShader type=" + shaderType); @@ -318,17 +256,6 @@ public static int loadShader(int shaderType, String source) { return shader; } - /** - * Create texture int. 
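createProgram()/loadShader(), unchanged in substance above, follow the standard compile-attach-link sequence with status checks at each step. A self-contained Kotlin sketch of the same sequence:

import android.opengl.GLES20

fun createProgram(vertexSrc: String, fragmentSrc: String): Int {
    fun load(type: Int, src: String): Int {
        val shader = GLES20.glCreateShader(type)
        GLES20.glShaderSource(shader, src)
        GLES20.glCompileShader(shader)
        val ok = IntArray(1)
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, ok, 0)
        check(ok[0] != 0) { "shader compile failed: " + GLES20.glGetShaderInfoLog(shader) }
        return shader
    }
    val program = GLES20.glCreateProgram()
    GLES20.glAttachShader(program, load(GLES20.GL_VERTEX_SHADER, vertexSrc))
    GLES20.glAttachShader(program, load(GLES20.GL_FRAGMENT_SHADER, fragmentSrc))
    GLES20.glLinkProgram(program)
    val ok = IntArray(1)
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, ok, 0)
    check(ok[0] != 0) { "program link failed: " + GLES20.glGetProgramInfoLog(program) }
    return program
}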
- * - * @param textureTarget the texture target - * @param bitmap the bitmap - * @param minFilter the min filter - * @param magFilter the mag filter - * @param wrapS the wrap s - * @param wrapT the wrap t - * @return the int - */ public static int createTexture(int textureTarget, Bitmap bitmap, int minFilter, int magFilter, int wrapS, int wrapT) { int[] textureHandle = new int[1]; diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt index c36491c60..1451750b4 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt @@ -33,18 +33,13 @@ import java.util.concurrent.Executors import java.util.concurrent.Future import javax.microedition.khronos.egl.EGLContext -/** - * Texture process helper - * - * @property cacheCount - * @constructor Create empty Texture process helper - */ class TextureProcessHelper( private val cacheCount: Int = 2 ) { private val TAG = "TextureProcessHelper" - private val glTextureBufferQueueIn = GLTextureBufferQueue(cacheCount = cacheCount) - private val glTextureBufferQueueOut = GLTextureBufferQueue(cacheCount = cacheCount) + private val glTextureBufferQueueIn = GLTextureBufferQueue(cacheCount = cacheCount, loggable = true) + private val glTextureBufferQueueOut = GLTextureBufferQueue(cacheCount = cacheCount, loggable = false) + private val glFrameBuffer = GLFrameBuffer() private val futureQueue = ConcurrentLinkedQueue>() private val workerThread = Executors.newSingleThreadExecutor() private val eglContextHelper = @@ -55,29 +50,10 @@ class TextureProcessHelper( private var isBegin = false private var frameIndex = 0 - /** - * Set filter - * - * @param filter - * @receiver - */ fun setFilter(filter: (GLTextureBufferQueue.TextureOut) -> Int) { this.filter = filter } - /** - * Process - * - * @param texId - * @param texType - * @param width - * @param height - * @param rotation - * @param transform - * @param isFrontCamera - * @param isMirror - * @return - */ fun process( texId: Int, texType: Int, width: Int, height: Int, rotation: Int, @@ -112,7 +88,7 @@ class TextureProcessHelper( width, height, rotation, - true, + false, isFrontCamera, isMirror, transform, @@ -130,7 +106,7 @@ class TextureProcessHelper( return@Callable -2 } - val frame = glTextureBufferQueueIn.dequeue() ?: return@Callable -2 + val frame = glTextureBufferQueueIn.dequeue(false) ?: return@Callable -2 val filterTexId = filter?.invoke(frame) ?: -1 if (filterTexId >= 0) { glTextureBufferQueueOut.enqueue( @@ -163,7 +139,7 @@ class TextureProcessHelper( ) ) } - + glTextureBufferQueueIn.dequeue(true) return@Callable 0 })) @@ -173,8 +149,9 @@ class TextureProcessHelper( try { val get = futureQueue.poll()?.get() ?: -1 if (get == 0) { - val dequeue = glTextureBufferQueueOut.dequeue() - ret = dequeue?.textureId ?: -1 + val dequeue = glTextureBufferQueueOut.dequeue() ?: return -1 + glFrameBuffer.setSize(dequeue.width, dequeue.height) + ret = glFrameBuffer.process(dequeue.textureId, dequeue.textureType) } }catch (e: Exception){ LogUtils.e(TAG, "process end with exception: $e") @@ -184,10 +161,6 @@ class TextureProcessHelper( return ret } - /** - * Reset - * - */ fun reset(){ if(frameIndex == 0){ return @@ -199,22 +172,15 @@ class TextureProcessHelper( future.cancel(true) future = futureQueue.poll() } + 
glTextureBufferQueueIn.reset() +// glFrameBuffer.release() executeSync { - glTextureBufferQueueIn.reset() glTextureBufferQueueOut.reset() } } - /** - * Size - * - */ fun size() = futureQueue.size - /** - * Release - * - */ fun release() { isReleased = true filter = null @@ -225,23 +191,18 @@ class TextureProcessHelper( future.cancel(true) future = futureQueue.poll() } + glTextureBufferQueueIn.release() + glFrameBuffer.release() executeSync { + glTextureBufferQueueOut.release() if (eglContextBase != null) { eglContextHelper.release() eglContextBase = null } - glTextureBufferQueueIn.release() - glTextureBufferQueueOut.release() } workerThread.shutdown() } - /** - * Execute sync - * - * @param run - * @receiver - */ fun executeSync(run: () -> Unit) { val latch = CountDownLatch(1) workerThread.execute { diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt index 517d2f1ea..0ecec0df9 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt @@ -32,37 +32,13 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -/** - * Version - */ -const val VERSION = "1.0.3" +const val VERSION = "1.0.6" -/** - * Capture mode - * - * @constructor Create empty Capture mode - */ enum class CaptureMode{ - /** - * Agora - * - * @constructor Create empty Agora - */ Agora, // 使用声网内部的祼数据接口进行处理 - - /** - * Custom - * - * @constructor Create empty Custom - */ Custom // 自定义模式,需要自己调用onFrame接口将原始视频帧传给BeautyAPI做处理 } -/** - * I event callback - * - * @constructor Create empty I event callback - */ interface IEventCallback{ /** @@ -73,83 +49,27 @@ interface IEventCallback{ fun onBeautyStats(stats: BeautyStats) } -/** - * Beauty stats - * - * @property minCostMs - * @property maxCostMs - * @property averageCostMs - * @constructor Create empty Beauty stats - */ data class BeautyStats( val minCostMs:Long, // 统计区间内的最小值 val maxCostMs: Long, // 统计区间内的最大值 val averageCostMs: Long // 统计区间内的平均值 ) -/** - * Mirror mode - * - * @constructor Create empty Mirror mode - */ enum class MirrorMode { // 没有镜像正常画面的定义:前置拍到画面和手机看到画面是左右不一致的,后置拍到画面和手机看到画面是左右一致的 - /** - * Mirror Local Remote - * - * @constructor Create empty Mirror Local Remote - */ MIRROR_LOCAL_REMOTE, //本地远端都镜像,前置默认,本地和远端贴纸都正常 - - /** - * Mirror Local Only - * - * @constructor Create empty Mirror Local Only - */ MIRROR_LOCAL_ONLY, // 仅本地镜像,远端不镜像,,远端贴纸正常,本地贴纸镜像。用于打电话场景,电商直播场景(保证电商直播后面的告示牌文字是正的);这种模式因为本地远端是反的,所以肯定有一边的文字贴纸方向会是反的 - - /** - * Mirror Remote Only - * - * @constructor Create empty Mirror Remote Only - */ MIRROR_REMOTE_ONLY, // 仅远端镜像,本地不镜像,远端贴纸正常,本地贴纸镜像 - - /** - * Mirror None - * - * @constructor Create empty Mirror None - */ MIRROR_NONE // 本地远端都不镜像,后置默认,本地和远端贴纸都正常 } -/** - * Camera config - * - * @property frontMirror - * @property backMirror - * @constructor Create empty Camera config - */ data class CameraConfig( val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // 前置默认镜像:本地远端都镜像 val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // 后置默认镜像:本地远端都不镜像 ) -/** - * Config - * - * @property context - * @property rtcEngine - * @property stHandlers - * @property eventCallback - * @property captureMode - * @property statsDuration - * @property statsEnable - * @property cameraConfig - * @constructor Create empty Config - */ data class Config( val context: 
Context, // Android Context上下文 val rtcEngine: RtcEngine, // 声网Rtc引擎 @@ -161,115 +81,28 @@ data class Config( val cameraConfig: CameraConfig = CameraConfig() // 摄像头镜像配置 ) -/** - * S t handlers - * - * @property effectNative - * @property humanActionNative - * @constructor Create empty S t handlers - */ data class STHandlers( val effectNative: STMobileEffectNative, val humanActionNative: STMobileHumanActionNative ) -/** - * Error code - * - * @property value - * @constructor Create empty Error code - */ enum class ErrorCode(val value: Int) { - /** - * Error Ok - * - * @constructor Create empty Error Ok - */ ERROR_OK(0), // 一切正常 - - /** - * Error Has Not Initialized - * - * @constructor Create empty Error Has Not Initialized - */ ERROR_HAS_NOT_INITIALIZED(101), // 没有调用Initialize或调用失败情况下调用了其他API - - /** - * Error Has Initialized - * - * @constructor Create empty Error Has Initialized - */ ERROR_HAS_INITIALIZED(102), // 已经Initialize成功后再次调用报错 - - /** - * Error Has Released - * - * @constructor Create empty Error Has Released - */ ERROR_HAS_RELEASED(103), // 已经调用release销毁后还调用其他API - - /** - * Error Process Not Custom - * - * @constructor Create empty Error Process Not Custom - */ ERROR_PROCESS_NOT_CUSTOM(104), // 非Custom处理模式下调用onFrame接口从外部传入视频帧 - - /** - * Error Process Disable - * - * @constructor Create empty Error Process Disable - */ - ERROR_PROCESS_DISABLE(105), // 当调用enable(false)禁用美颜后调用onFrame接口返回 - - /** - * Error View Type Error - * - * @constructor Create empty Error View Type Error - */ - ERROR_VIEW_TYPE_ERROR(106), // 当调用setupLocalVideo时view类型错误时返回 - - /** - * Error Frame Skipped - * - * @constructor Create empty Error Frame Skipped - */ - ERROR_FRAME_SKIPPED(107), // 当处理帧忽略时在onFrame返回 + ERROR_VIEW_TYPE_ERROR(105), // 当调用setupLocalVideo时view类型错误时返回 + ERROR_FRAME_SKIPPED(106), // 当处理帧忽略时在onFrame返回 } -/** - * Beauty preset - * - * @constructor Create empty Beauty preset - */ enum class BeautyPreset { - /** - * Custom - * - * @constructor Create empty Custom - */ CUSTOM, // 不使用推荐的美颜参数 - - /** - * Default - * - * @constructor Create empty Default - */ DEFAULT // 默认的 } -/** - * Create sense time beauty a p i - * - * @return - */ fun createSenseTimeBeautyAPI(): SenseTimeBeautyAPI = SenseTimeBeautyAPIImpl() -/** - * Sense time beauty a p i - * - * @constructor Create empty Sense time beauty a p i - */ interface SenseTimeBeautyAPI { /** @@ -324,12 +157,20 @@ interface SenseTimeBeautyAPI { fun isFrontCamera(): Boolean /** - * Get mirror applied + * 获取镜像状态 * - * @return + * @return 镜像状态,true: 镜像,false:非镜像 */ fun getMirrorApplied(): Boolean + + /** + * 在处理线程里执行操作 + * + * @param run 操作run + */ + fun runOnProcessThread(run: ()->Unit) + /** * 私参配置,用于不对外api的调用,多用于测试 */ diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt index fe056a10f..11c2461c3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt @@ -52,14 +52,10 @@ import io.agora.rtc2.gl.EglBaseProvider import io.agora.rtc2.video.IVideoFrameObserver import io.agora.rtc2.video.VideoCanvas import java.nio.ByteBuffer +import java.util.Collections import java.util.concurrent.Callable import java.util.concurrent.Executors -/** - * Sense time beauty a p i impl - * - * @constructor Create empty Sense time beauty a p i impl - */ class SenseTimeBeautyAPIImpl : 
SenseTimeBeautyAPI, IVideoFrameObserver { private val TAG = "SenseTimeBeautyAPIImpl" private val reportId = "scenarioAPI" @@ -81,64 +77,25 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private var isFrontCamera = true private var cameraConfig = CameraConfig() private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN + private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) private enum class ProcessSourceType{ - /** - * Unknown - * - * @constructor Create empty Unknown - */ UNKNOWN, - - /** - * Texture Oes Api26 - * - * @constructor Create empty Texture Oes Api26 - */ TEXTURE_OES_API26, - - /** - * Texture 2d Api26 - * - * @constructor Create empty Texture 2d Api26 - */ TEXTURE_2D_API26, - - /** - * Texture Oes - * - * @constructor Create empty Texture Oes - */ TEXTURE_OES, - - /** - * Texture 2d - * - * @constructor Create empty Texture 2d - */ TEXTURE_2D, - - /** - * I420 - * - * @constructor Create empty I420 - */ I420, } private var currProcessSourceType = ProcessSourceType.UNKNOWN - /** - * Initialize - * - * @param config - * @return - */ override fun initialize(config: Config): Int { if (this.config != null) { LogUtils.e(TAG, "initialize >> The beauty api has been initialized!") return ErrorCode.ERROR_HAS_INITIALIZED.value } this.config = config + this.cameraConfig = config.cameraConfig if (config.captureMode == CaptureMode.Agora) { config.rtcEngine.registerVideoFrameObserver(this) } @@ -146,7 +103,6 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { this.config?.eventCallback?.onBeautyStats(it) } cameraConfig = CameraConfig(config.cameraConfig.frontMirror, config.cameraConfig.backMirror) - LogUtils.setLogFilePath(config.context.getExternalFilesDir("")?.absolutePath ?: "") LogUtils.i(TAG, "initialize >> config = $config") LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${STCommonNative.getVersion()}") // config.rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\":101}") // 实时上报 @@ -154,12 +110,6 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Enable - * - * @param enable - * @return - */ override fun enable(enable: Boolean): Int { LogUtils.i(TAG, "enable >> enable = $enable") if (config == null) { @@ -185,13 +135,6 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Setup local video - * - * @param view - * @param renderMode - * @return - */ override fun setupLocalVideo(view: View, renderMode: Int): Int { val rtcEngine = config?.rtcEngine if(rtcEngine == null){ @@ -210,12 +153,6 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_VIEW_TYPE_ERROR.value } - /** - * On frame - * - * @param videoFrame - * @return - */ override fun onFrame(videoFrame: VideoFrame): Int { val conf = config if(conf == null){ @@ -230,9 +167,6 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!") return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value } - if (!enable) { - return ErrorCode.ERROR_PROCESS_DISABLE.value - } if (processBeauty(videoFrame)) { return ErrorCode.ERROR_OK.value } @@ -240,12 +174,6 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_FRAME_SKIPPED.value } - /** - * Set beauty preset - * - * @param preset - * @return - */ override fun setBeautyPreset(preset: 
BeautyPreset): Int { val effectNative = config?.stHandlers?.effectNative if(effectNative == null){ @@ -368,12 +296,24 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Update camera config - * - * @param config - * @return - */ + override fun runOnProcessThread(run: () -> Unit) { + if (config == null) { + LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!") + return + } + if (isReleased) { + LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!") + return + } + if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { + run.invoke() + } else if (textureBufferHelper != null) { + textureBufferHelper?.invoke(run) + } else { + pendingProcessRunList.add(run) + } + } + override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) @@ -382,31 +322,17 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_OK.value } - /** - * Is front camera - * - */ override fun isFrontCamera() = isFrontCamera - /** - * Set parameters - * - * @param key - * @param value - */ override fun setParameters(key: String, value: String) { when(key){ "beauty_mode" -> beautyMode = value.toInt() } } - /** - * Release - * - * @return - */ override fun release(): Int { - if(config == null){ + val conf = config + if(conf == null){ LogUtils.e(TAG, "release >> The beauty api has not been initialized!") return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value } @@ -414,13 +340,17 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!") return ErrorCode.ERROR_HAS_RELEASED.value } - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) + if (conf.captureMode == CaptureMode.Agora) { + conf.rtcEngine.registerVideoFrameObserver(null) + } + conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) LogUtils.i(TAG, "release") isReleased = true workerThreadExecutor.shutdown() textureBufferHelper?.let { textureBufferHelper = null + // it.handler.removeCallbacksAndMessages(null) it.invoke { beautyProcessor?.release() null @@ -429,6 +359,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { } statsHelper?.reset() statsHelper = null + pendingProcessRunList.clear() return ErrorCode.ERROR_OK.value } @@ -510,6 +441,15 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { "STRender", EglBaseProvider.instance().rootEglBase.eglBaseContext ) + textureBufferHelper?.invoke { + synchronized(pendingProcessRunList){ + val iterator = pendingProcessRunList.iterator() + while (iterator.hasNext()){ + iterator.next().invoke() + iterator.remove() + } + } + } LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode") } @@ -604,7 +544,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES else -> GLES20.GL_TEXTURE_2D }, - textureMatrix = matrix + textureMatrix = matrix, ) )?.textureId ?: -1 }) @@ -722,79 +662,32 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { // IVideoFrameObserver implements - /** - * On capture video frame - * - * @param sourceType - * @param videoFrame - * @return - */ override fun 
onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean { videoFrame ?: return false return processBeauty(videoFrame) } - /** - * On pre encode video frame - * - * @param sourceType - * @param videoFrame - * @return - */ override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?) : Boolean { return true } - /** - * On media player video frame - * - * @param videoFrame - * @param mediaPlayerId - */ override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int) = false - /** - * On render video frame - * - * @param channelId - * @param uid - * @param videoFrame - */ override fun onRenderVideoFrame( channelId: String?, uid: Int, videoFrame: VideoFrame? ) = false - /** - * Get video frame process mode - * - */ override fun getVideoFrameProcessMode() = IVideoFrameObserver.PROCESS_MODE_READ_WRITE - /** - * Get video format preference - * - */ override fun getVideoFormatPreference() = IVideoFrameObserver.VIDEO_PIXEL_DEFAULT - /** - * Get rotation applied - * - */ override fun getRotationApplied() = false - /** - * Get mirror applied - * - */ override fun getMirrorApplied() = captureMirror && !enable - /** - * Get observed frame position - * - */ override fun getObservedFramePosition() = IVideoFrameObserver.POSITION_POST_CAPTURER } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/LogUtils.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/LogUtils.kt index c73922732..81c551e54 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/LogUtils.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/LogUtils.kt @@ -24,118 +24,33 @@ package io.agora.beautyapi.sensetime.utils -import android.util.Log -import java.io.File -import java.io.FileOutputStream -import java.text.SimpleDateFormat -import java.util.Date -import java.util.Locale -import java.util.concurrent.Executors +import io.agora.base.internal.Logging -/** - * Log utils - * - * @constructor Create empty Log utils - */ object LogUtils { private const val beautyType = "SenseTime" - private val timeFormat = SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.ROOT) - private val logFileName = "agora_beautyapi_${beautyType.toLowerCase(Locale.US)}_android.log" - private val workerThread = Executors.newSingleThreadExecutor() - private var logOutputStream: FileOutputStream? 
= null - - /** - * Set log file path - * - * @param path - */ - @JvmStatic - fun setLogFilePath(path: String){ - if(path.isEmpty()){ - e("LogUtils", "setLogFilePath >> path is empty!") - return - } - val direction = File(path) - if(!direction.exists()){ - direction.mkdirs() - } - val file = File(direction, logFileName) - if(!file.exists()){ - file.createNewFile() - } - val append = file.length() < 2 * 1024 * 1024 - logOutputStream = FileOutputStream(file, append) - } - - /** - * I - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun i(tag: String, content: String, vararg args: Any) { val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][INFO] : ${String.format(content, args)}" - Log.v(tag, consoleMessage) - saveToFile(fileMessage) + Logging.log(Logging.Severity.LS_INFO, tag, consoleMessage) } - /** - * D - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun d(tag: String, content: String, vararg args: Any) { val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][DEBUG] : ${String.format(content, args)}" - Log.d(tag, consoleMessage) - saveToFile(fileMessage) + Logging.d(tag, consoleMessage) } - /** - * W - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun w(tag: String, content: String, vararg args: Any){ val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][WARN] : ${String.format(content, args)}" - Log.w(tag, consoleMessage) - saveToFile(fileMessage) + Logging.w(tag, consoleMessage) } - /** - * E - * - * @param tag - * @param content - * @param args - */ @JvmStatic fun e(tag: String, content: String, vararg args: Any){ val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, args)}" - val fileMessage = "${timeFormat.format(Date())} : [BeautyAPI][$beautyType][$tag][ERROR] : ${String.format(content, args)}" - Log.e(tag, consoleMessage) - saveToFile(fileMessage) + Logging.e(tag, consoleMessage) } - - private fun saveToFile(message: String){ - val outputStream = logOutputStream ?: return - workerThread.execute { - outputStream.write(message.toByteArray()) - if(!message.endsWith("\n")){ - outputStream.write("\n".toByteArray()) - } - } - } } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/StatsHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/StatsHelper.kt index 748a8919d..7391003ae 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/StatsHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/StatsHelper.kt @@ -30,13 +30,6 @@ import io.agora.beautyapi.sensetime.BeautyStats import kotlin.math.max import kotlin.math.min -/** - * Stats helper - * - * @property statsDuration - * @property onStatsChanged - * @constructor Create empty Stats helper - */ class StatsHelper( private val statsDuration: Long, private val onStatsChanged: (BeautyStats) -> Unit @@ -48,11 +41,6 @@ class StatsHelper( private var mCostMax = 0L private var mCostMin = Long.MAX_VALUE - /** - * Once - * - * @param cost - */ fun once(cost: Long) { val curr = System.currentTimeMillis() if (mStartTime == 0L) { @@ -80,10 +68,6 @@ class StatsHelper( mCostMin = 
min(mCostMin, cost) } - /** - * Reset - * - */ fun reset() { mMainHandler.removeCallbacksAndMessages(null) mStartTime = 0 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLCopyHelper.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLCopyHelper.java index 2385613ed..f939bd62e 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLCopyHelper.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLCopyHelper.java @@ -4,51 +4,31 @@ import android.opengl.GLES20; import android.opengl.GLES30; -/** - * The type Gl copy helper. - */ public class GLCopyHelper { private final int bufferCount; - /** - * Instantiates a new Gl copy helper. - */ - public GLCopyHelper() { + public GLCopyHelper(){ this(1); } - /** - * Instantiates a new Gl copy helper. - * - * @param bufferCount the buffer count - */ - public GLCopyHelper(int bufferCount) { + public GLCopyHelper(int bufferCount){ this.bufferCount = bufferCount; } private int[] mDstFrameBuffer; private int[] mSrcFrameBuffer; - /** - * Copy 2 d texture to oes texture. - * - * @param srcTexture the src texture - * @param dstTexture the dst texture - * @param width the width - * @param height the height - * @param index the index - */ public void copy2DTextureToOesTexture( int srcTexture, int dstTexture, int width, int height, - int index) { - if (mDstFrameBuffer == null) { + int index){ + if(mDstFrameBuffer == null){ mDstFrameBuffer = new int[bufferCount]; GLES20.glGenFramebuffers(bufferCount, mDstFrameBuffer, 0); } - if (mSrcFrameBuffer == null) { + if(mSrcFrameBuffer == null){ mSrcFrameBuffer = new int[bufferCount]; GLES20.glGenFramebuffers(bufferCount, mSrcFrameBuffer, 0); } @@ -66,16 +46,13 @@ public void copy2DTextureToOesTexture( GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); } - /** - * Release. - */ - public void release() { - if (mDstFrameBuffer != null) { + public void release(){ + if(mDstFrameBuffer != null){ GLES20.glDeleteFramebuffers(mDstFrameBuffer.length, mDstFrameBuffer, 0); mDstFrameBuffer = null; } - if (mSrcFrameBuffer != null) { + if(mSrcFrameBuffer != null){ GLES20.glDeleteFramebuffers(mSrcFrameBuffer.length, mSrcFrameBuffer, 0); mSrcFrameBuffer = null; } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java index b9ae7a1e5..3db7fdd06 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java @@ -7,9 +7,6 @@ import io.agora.base.internal.video.GlRectDrawer; import io.agora.base.internal.video.RendererCommon; -/** - * The type Gl frame buffer. - */ public class GLFrameBuffer { private int mFramebufferId = -1; @@ -21,20 +18,10 @@ public class GLFrameBuffer { private float[] mTexMatrix = GlUtil.IDENTITY_MATRIX; - /** - * Instantiates a new Gl frame buffer. - */ public GLFrameBuffer() { } - /** - * Sets size. - * - * @param width the width - * @param height the height - * @return the size - */ public boolean setSize(int width, int height) { if (mWidth != width || mHeight != height) { mWidth = width; @@ -45,66 +32,36 @@ public boolean setSize(int width, int height) { return false; } - /** - * Sets rotation. 
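The StatsHelper hunks above (both beauty flavours) keep the same windowed accumulator: per-frame costs fold into min/max/total, and a report is flushed once statsDuration has elapsed. A compact sketch of that accumulator; the class and callback names here are illustrative:

// Collects frame costs and reports (min, max, average) once per window.
class CostWindow(
    private val windowMs: Long,
    private val onStats: (min: Long, max: Long, avg: Long) -> Unit
) {
    private var start = 0L
    private var total = 0L
    private var count = 0L
    private var maxCost = 0L
    private var minCost = Long.MAX_VALUE

    fun once(cost: Long) {
        val now = System.currentTimeMillis()
        if (start == 0L) start = now
        total += cost; count++
        maxCost = maxOf(maxCost, cost)
        minCost = minOf(minCost, cost)
        if (now - start >= windowMs && count > 0) {
            onStats(minCost, maxCost, total / count)
            start = now; total = 0; count = 0
            maxCost = 0; minCost = Long.MAX_VALUE
        }
    }
}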
- * - * @param rotation the rotation - */ public void setRotation(int rotation) { if (mRotation != rotation) { mRotation = rotation; } } - /** - * Sets flip v. - * - * @param flipV the flip v - */ public void setFlipV(boolean flipV) { if (isFlipV != flipV) { isFlipV = flipV; } } - /** - * Sets flip h. - * - * @param flipH the flip h - */ public void setFlipH(boolean flipH) { if (isFlipH != flipH) { isFlipH = flipH; } } - /** - * Sets texture id. - * - * @param textureId the texture id - */ - public void setTextureId(int textureId) { - if (mTextureId != textureId) { + public void setTextureId(int textureId){ + if(mTextureId != textureId){ deleteTexture(); mTextureId = textureId; isTextureChanged = true; } } - /** - * Gets texture id. - * - * @return the texture id - */ - public int getTextureId() { + public int getTextureId(){ return mTextureId; } - /** - * Sets tex matrix. - * - * @param matrix the matrix - */ public void setTexMatrix(float[] matrix) { if (matrix != null) { mTexMatrix = matrix; @@ -113,43 +70,32 @@ public void setTexMatrix(float[] matrix) { } } - /** - * Reset transform. - */ - public void resetTransform() { + public void resetTransform(){ mTexMatrix = GlUtil.IDENTITY_MATRIX; - isFlipH = false; - isFlipV = false; + isFlipH = isFlipV = false; mRotation = 0; } - /** - * Process int. - * - * @param textureId the texture id - * @param textureType the texture type - * @return the int - */ public int process(int textureId, int textureType) { if (mWidth <= 0 && mHeight <= 0) { throw new RuntimeException("setSize firstly!"); } - if (mTextureId == -1) { + if(mTextureId == -1){ mTextureId = createTexture(mWidth, mHeight); bindFramebuffer(mTextureId); isTextureInner = true; - } else if (isTextureInner && isSizeChanged) { + }else if(isTextureInner && isSizeChanged){ GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0); mTextureId = createTexture(mWidth, mHeight); bindFramebuffer(mTextureId); - } else if (isTextureChanged) { + }else if(isTextureChanged){ bindFramebuffer(mTextureId); } isTextureChanged = false; isSizeChanged = false; - if (drawer == null) { + if(drawer == null){ drawer = new GlRectDrawer(); } @@ -160,16 +106,16 @@ public int process(int textureId, int textureType) { transform.preTranslate(0.5f, 0.5f); transform.preRotate(mRotation, 0.f, 0.f); transform.preScale( - isFlipH ? -1.f : 1.f, - isFlipV ? -1.f : 1.f + isFlipH ? -1.f: 1.f, + isFlipV ? -1.f: 1.f ); transform.preTranslate(-0.5f, -0.5f); float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform); - if (textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES) { - drawer.drawOes(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); - } else { - drawer.drawRgb(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); + if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){ + drawer.drawOes(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); + }else{ + drawer.drawRgb(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight); } GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); GLES20.glFinish(); @@ -177,14 +123,11 @@ public int process(int textureId, int textureType) { return mTextureId; } - /** - * Release. - */ - public void release() { + public void release(){ deleteTexture(); deleteFramebuffer(); - if (drawer != null) { + if(drawer != null){ drawer.release(); drawer = null; } @@ -198,14 +141,7 @@ private void deleteFramebuffer() { } } - /** - * Create texture int. 
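createTexture(), compacted in the next hunk, allocates the RGBA colour attachment the framebuffer renders into. The allocation in isolation; the filter and wrap parameters shown are the common defaults, not necessarily the patch's exact ones:

import android.opengl.GLES20

fun createRgbaTexture(width: Int, height: Int): Int {
    val tex = IntArray(1)
    GLES20.glGenTextures(1, tex, 0)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex[0])
    // Reserve storage without initial pixel data; the FBO will fill it in.
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null)
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE)
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
    return tex[0]
}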
- * - * @param width the width - * @param height the height - * @return the int - */ - public int createTexture(int width, int height) { + public int createTexture(int width, int height){ int[] textures = new int[1]; GLES20.glGenTextures(1, textures, 0); GlUtil.checkGlError("glGenTextures"); @@ -229,13 +165,6 @@ public int createTexture(int width, int height) { return textureId; } - /** - * Resize texture. - * - * @param textureId the texture id - * @param width the width - * @param height the height - */ public void resizeTexture(int textureId, int width, int height) { GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, @@ -252,7 +181,7 @@ private void deleteTexture() { } private void bindFramebuffer(int textureId) { - if (mFramebufferId == -1) { + if(mFramebufferId == -1){ int[] framebuffers = new int[1]; GLES20.glGenFramebuffers(1, framebuffers, 0); GlUtil.checkGlError("glGenFramebuffers"); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTestUtils.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTestUtils.java index bfacdf7cd..67f65cad1 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTestUtils.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTestUtils.java @@ -39,24 +39,9 @@ import io.agora.beautyapi.sensetime.utils.LogUtils; -/** - * The type Gl test utils. - */ -public final class GLTestUtils { +public class GLTestUtils { private static final String TAG = "GLUtils"; - private GLTestUtils() { - - } - - /** - * Gets texture 2 d image. - * - * @param textureID the texture id - * @param width the width - * @param height the height - * @return the texture 2 d image - */ public static Bitmap getTexture2DImage(int textureID, int width, int height) { try { int[] oldFboId = new int[1]; @@ -98,14 +83,6 @@ public static Bitmap getTexture2DImage(int textureID, int width, int height) { return null; } - /** - * Gets texture oes image. - * - * @param textureID the texture id - * @param width the width - * @param height the height - * @return the texture oes image - */ public static Bitmap getTextureOESImage(int textureID, int width, int height) { try { int[] oldFboId = new int[1]; @@ -147,14 +124,6 @@ public static Bitmap getTextureOESImage(int textureID, int width, int height) { return null; } - /** - * Nv 21 to bitmap bitmap. 
- * - * @param nv21 the nv 21 - * @param width the width - * @param height the height - * @return the bitmap - */ public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) { Bitmap bitmap = null; try { @@ -169,7 +138,7 @@ public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) { return bitmap; } - private static Bitmap readBitmap(int width, int height) { + private static Bitmap readBitmap(int width, int height){ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4); rgbaBuf.position(0); GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTextureBufferQueue.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTextureBufferQueue.kt index bf973bbb4..d0cf57ef7 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTextureBufferQueue.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLTextureBufferQueue.kt @@ -29,13 +29,6 @@ import android.util.Size import io.agora.beautyapi.sensetime.utils.LogUtils import java.util.concurrent.ConcurrentLinkedQueue -/** - * G l texture buffer queue - * - * @property glFrameBuffer - * @property cacheCount - * @constructor Create empty G l texture buffer queue - */ class GLTextureBufferQueue( private val glFrameBuffer: GLFrameBuffer, private val cacheCount: Int = 6 @@ -47,12 +40,6 @@ class GLTextureBufferQueue( private val textureIdQueue = ConcurrentLinkedQueue() - /** - * Enqueue - * - * @param iN - * @return - */ fun enqueue(iN: TextureIn): Int { var size = textureIdQueue.size if (size < cacheCount) { @@ -96,25 +83,27 @@ class GLTextureBufferQueue( cacheTextureOuts[cacheIndex] = out } + var flipV = true + var flipH = false glFrameBuffer.textureId = out.textureId glFrameBuffer.setSize(out.width, out.height) glFrameBuffer.resetTransform() glFrameBuffer.setRotation(iN.rotation) if (iN.transform != null) { glFrameBuffer.setTexMatrix(iN.transform) - var flipH = iN.isFrontCamera - if(iN.isMirror){ - flipH = !flipH - } - glFrameBuffer.setFlipH(flipH) + flipH = iN.isFrontCamera } else { - var flipH = !iN.isFrontCamera - if(iN.isMirror){ - flipH = !flipH - } - glFrameBuffer.setFlipH(flipH) + flipH = !iN.isFrontCamera + } + if(iN.isMirror){ + flipH = !flipH + } + if(iN.rotation == 0 || iN.rotation == 180){ + flipV = !flipV + flipH = !flipH } - glFrameBuffer.setFlipV(true) + glFrameBuffer.setFlipH(flipH) + glFrameBuffer.setFlipV(flipV) glFrameBuffer.process(iN.textureId, iN.textureType) GLES20.glFinish() out.index = cacheIndex @@ -129,36 +118,19 @@ class GLTextureBufferQueue( return size } - /** - * Dequeue - * - * @return - */ fun dequeue(): TextureOut? 
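// Reviewer note on the flip rework above: mirror and rotation handling is now folded
// into local flipH/flipV variables before a single setFlipH/setFlipV pair. Read as a
// pure function (a review-only sketch, not part of the patch):
//
//     fun resolveFlips(hasTransform: Boolean, isFront: Boolean,
//                      isMirror: Boolean, rotation: Int): Pair<Boolean, Boolean> {
//         var flipH = if (hasTransform) isFront else !isFront // base on camera facing
//         if (isMirror) flipH = !flipH                        // mirroring toggles H
//         var flipV = true                                    // flipV stays true by default, as before
//         if (rotation == 0 || rotation == 180) {             // new 0/180-degree special case
//             flipV = !flipV
//             flipH = !flipH
//         }
//         return flipH to flipV                               // (flipH, flipV)
//     }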
{ val size = textureIdQueue.size val poll = textureIdQueue.poll() return poll } - /** - * Size - * - */ fun size() = textureIdQueue.size - /** - * Reset - * - */ fun reset() { cacheIndex = 0 textureIdQueue.clear() } - /** - * Release - * - */ fun release() { cacheIndex = 0 cacheTextureOuts.forEachIndexed { index, textureOut -> @@ -170,19 +142,6 @@ class GLTextureBufferQueue( textureIdQueue.clear() } - /** - * Texture in - * - * @property textureId - * @property textureType - * @property width - * @property height - * @property rotation - * @property isFrontCamera - * @property isMirror - * @property transform - * @constructor Create empty Texture in - */ data class TextureIn( val textureId: Int, val textureType: Int, @@ -194,17 +153,6 @@ class GLTextureBufferQueue( val transform: FloatArray? ) - /** - * Texture out - * - * @property index - * @property textureId - * @property textureType - * @property width - * @property height - * @property isFrontCamera - * @constructor Create empty Texture out - */ data class TextureOut( var index: Int = 0, val textureId: Int, diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GlUtil.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GlUtil.java index ceab345ea..41c1d24e3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GlUtil.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GlUtil.java @@ -47,24 +47,15 @@ import io.agora.beautyapi.sensetime.utils.LogUtils; -/** - * The type Gl util. - */ -public final class GlUtil { +public class GlUtil { private static final String TAG = "GlUtil"; + /** Identity matrix for general use. Don't modify or life will get weird. */ - /** - * Identity matrix for general use. Don't modify or life will get weird. - */ public static final int NO_TEXTURE = -1; private static final int SIZEOF_FLOAT = 4; - /** - * The constant IDENTITY_MATRIX. - */ public static final float[] IDENTITY_MATRIX = new float[16]; - static { Matrix.setIdentityM(IDENTITY_MATRIX, 0); } @@ -72,14 +63,6 @@ public final class GlUtil { private GlUtil() { // do not instantiate } - /** - * Create program int. - * - * @param applicationContext the application context - * @param vertexSourceRawId the vertex source raw id - * @param fragmentSourceRawId the fragment source raw id - * @return the int - */ public static int createProgram(Context applicationContext, @RawRes int vertexSourceRawId, @RawRes int fragmentSourceRawId) { @@ -89,13 +72,6 @@ public static int createProgram(Context applicationContext, @RawRes int vertexSo return createProgram(vertexSource, fragmentSource); } - /** - * Create program int. - * - * @param vertexSource the vertex source - * @param fragmentSource the fragment source - * @return the int - */ public static int createProgram(String vertexSource, String fragmentSource) { int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); if (vertexShader == 0) { @@ -126,13 +102,6 @@ public static int createProgram(String vertexSource, String fragmentSource) { return program; } - /** - * Load shader int. - * - * @param shaderType the shader type - * @param source the source - * @return the int - */ public static int loadShader(int shaderType, String source) { int shader = GLES20.glCreateShader(shaderType); checkGlError("glCreateShader type=" + shaderType); @@ -149,17 +118,6 @@ public static int loadShader(int shaderType, String source) { return shader; } - /** - * Create texture int. 
- * - * @param textureTarget the texture target - * @param bitmap the bitmap - * @param minFilter the min filter - * @param magFilter the mag filter - * @param wrapS the wrap s - * @param wrapT the wrap t - * @return the int - */ public static int createTexture(int textureTarget, @Nullable Bitmap bitmap, int minFilter, int magFilter, int wrapS, int wrapT) { int[] textureHandle = new int[1]; @@ -181,37 +139,16 @@ public static int createTexture(int textureTarget, @Nullable Bitmap bitmap, int return textureHandle[0]; } - /** - * Create texture int. - * - * @param textureTarget the texture target - * @return the int - */ public static int createTexture(int textureTarget) { return createTexture(textureTarget, null, GLES20.GL_LINEAR, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE); } - /** - * Create texture int. - * - * @param textureTarget the texture target - * @param bitmap the bitmap - * @return the int - */ public static int createTexture(int textureTarget, Bitmap bitmap) { return createTexture(textureTarget, bitmap, GLES20.GL_LINEAR, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE); } - /** - * Init effect texture. - * - * @param width the width - * @param height the height - * @param textureId the texture id - * @param type the type - */ public static void initEffectTexture(int width, int height, int[] textureId, int type) { int len = textureId.length; if (len > 0) { @@ -231,11 +168,8 @@ public static void initEffectTexture(int width, int height, int[] textureId, int GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); } } - /** * Checks to see if a GLES error has been raised. - * - * @param op the op */ public static void checkGlError(String op) { int error = GLES20.glGetError(); @@ -248,9 +182,6 @@ public static void checkGlError(String op) { /** * Allocates a direct float buffer, and populates it with the float array data. - * - * @param coords the coords - * @return the float buffer */ public static FloatBuffer createFloatBuffer(float[] coords) { // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. @@ -262,13 +193,6 @@ public static FloatBuffer createFloatBuffer(float[] coords) { return fb; } - /** - * Read text from raw resource string. - * - * @param applicationContext the application context - * @param resourceId the resource id - * @return the string - */ public static String readTextFromRawResource(final Context applicationContext, @RawRes final int resourceId) { final InputStream inputStream = @@ -289,22 +213,14 @@ public static String readTextFromRawResource(final Context applicationContext, return body.toString(); } - /** - * Create transform matrix float [ ]. 
- * - * @param rotation the rotation - * @param flipH the flip h - * @param flipV the flip v - * @return the float [ ] - */ - public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV) { + public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV){ float[] renderMVPMatrix = new float[16]; float[] tmp = new float[16]; Matrix.setIdentityM(tmp, 0); boolean _flipH = flipH; boolean _flipV = flipV; - if (rotation % 180 != 0) { + if(rotation % 180 != 0){ _flipH = flipV; _flipV = flipH; } @@ -318,7 +234,7 @@ public static float[] createTransformMatrix(int rotation, boolean flipH, boolean float _rotation = rotation; if (_rotation != 0) { - if (_flipH != _flipV) { + if(_flipH != _flipV){ _rotation *= -1; } Matrix.rotateM(tmp, 0, tmp, 0, _rotation, 0, 0, 1); @@ -329,13 +245,8 @@ public static float[] createTransformMatrix(int rotation, boolean flipH, boolean return renderMVPMatrix; } - /** - * Gets curr gl context. - * - * @return the curr gl context - */ - public static EGLContext getCurrGLContext() { - EGL10 egl = (EGL10) EGLContext.getEGL(); + public static EGLContext getCurrGLContext(){ + EGL10 egl = (EGL10)EGLContext.getEGL(); if (egl != null && !Objects.equals(egl.eglGetCurrentContext(), EGL10.EGL_NO_CONTEXT)) { return egl.eglGetCurrentContext(); } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/Accelerometer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/Accelerometer.java index 6e0b04f1d..fa772e63d 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/Accelerometer.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/Accelerometer.java @@ -30,12 +30,8 @@ import android.hardware.SensorEventListener; import android.hardware.SensorManager; -/** - * The type Accelerometer. - */ public class Accelerometer { /** - * The enum Clockwise angle. * * @author MatrixCV * @@ -61,34 +57,12 @@ public class Accelerometer { * |+---------+| * |_____O_____| */ - public enum ClockwiseAngle { - /** - * Deg 0 clockwise angle. - */ - Deg0(0), - /** - * Deg 90 clockwise angle. - */ - Deg90(1), - /** - * Deg 180 clockwise angle. - */ - Deg180(2), - /** - * Deg 270 clockwise angle. - */ - Deg270(3); + public enum CLOCKWISE_ANGLE { + Deg0(0), Deg90(1), Deg180(2), Deg270(3); private int value; - - ClockwiseAngle(int value) { + private CLOCKWISE_ANGLE(int value){ this.value = value; } - - /** - * Gets value. - * - * @return the value - */ public int getValue() { return value; } @@ -98,30 +72,28 @@ public int getValue() { private boolean hasStarted = false; - private ClockwiseAngle rotation; + private CLOCKWISE_ANGLE rotation; private SensorEvent sensorEvent; /** - * Instantiates a new Accelerometer. 
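// Reviewer note: renaming ClockwiseAngle back to CLOCKWISE_ANGLE reverts an earlier
// checkstyle-driven rename so Accelerometer again matches the upstream beauty-api
// source; the call sites in BeautyProcessor and FaceDetector are switched back to the
// old name in the hunks that follow.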
* - * @param ctx 用Activity初始化获得传感器 + * @param ctx + * 用Activity初始化获得传感器 */ public Accelerometer(Context ctx) { sensorManager = (SensorManager) ctx .getSystemService(Context.SENSOR_SERVICE); - rotation = ClockwiseAngle.Deg90; + rotation = CLOCKWISE_ANGLE.Deg90; } /** * 开始对传感器的监听 */ public void start() { - if (hasStarted) { - return; - } + if (hasStarted) return; hasStarted = true; - rotation = ClockwiseAngle.Deg90; + rotation = CLOCKWISE_ANGLE.Deg90; sensorManager.registerListener(accListener, sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL); @@ -131,27 +103,20 @@ public void start() { * 结束对传感器的监听 */ public void stop() { - if (!hasStarted) { - return; - } + if (!hasStarted) return; hasStarted = false; sensorManager.unregisterListener(accListener); } /** - * Gets direction. * - * @return 返回当前手机转向 direction + * @return + * 返回当前手机转向 */ public int getDirection() { return rotation.getValue(); } - /** - * Gets sensor event. - * - * @return the sensor event - */ public SensorEvent getSensorEvent() { return sensorEvent; } @@ -170,18 +135,19 @@ public void onSensorChanged(SensorEvent arg0) { if (arg0.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { float x = arg0.values[0]; float y = arg0.values[1]; - if (Math.abs(x) > 3 || Math.abs(y) > 3) { - if (Math.abs(x) > Math.abs(y)) { + float z = arg0.values[2]; + if (Math.abs(x)>3 || Math.abs(y)>3) { + if (Math.abs(x)> Math.abs(y)) { if (x > 0) { - rotation = ClockwiseAngle.Deg0; + rotation = CLOCKWISE_ANGLE.Deg0; } else { - rotation = ClockwiseAngle.Deg180; + rotation = CLOCKWISE_ANGLE.Deg180; } } else { if (y > 0) { - rotation = ClockwiseAngle.Deg90; + rotation = CLOCKWISE_ANGLE.Deg90; } else { - rotation = ClockwiseAngle.Deg270; + rotation = CLOCKWISE_ANGLE.Deg270; } } } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt index cfadc9996..164406442 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt @@ -20,13 +20,8 @@ import io.agora.beautyapi.sensetime.utils.LogUtils import io.agora.beautyapi.sensetime.utils.egl.GLCopyHelper import io.agora.beautyapi.sensetime.utils.egl.GLFrameBuffer import io.agora.beautyapi.sensetime.utils.egl.GLTextureBufferQueue -import io.agora.beautyapi.sensetime.utils.processor.Accelerometer.ClockwiseAngle +import io.agora.beautyapi.sensetime.utils.processor.Accelerometer.CLOCKWISE_ANGLE -/** - * Beauty processor - * - * @constructor Create empty Beauty processor - */ class BeautyProcessor : IBeautyProcessor { private val TAG = this::class.java.simpleName @@ -48,17 +43,20 @@ class BeautyProcessor : IBeautyProcessor { private var mCustomEvent = 0 private var mInputWidth = 0 private var mInputHeight = 0 + private var mInputOrientation = 0 private var isLastFrontCamera = false + private var skipFrame = 0 + private var processMode = ProcessMode.DOUBLE_INPUT @Volatile private var isReleased = false - /** - * Initialize - * - * @param effectNative - * @param humanActionNative - */ + enum class ProcessMode { + DOUBLE_INPUT, + SINGLE_BYTES_INPUT, + SINGLE_TEXTURE_INPUT + } + override fun initialize( effectNative: STMobileEffectNative, humanActionNative: STMobileHumanActionNative @@ -67,10 +65,6 @@ class BeautyProcessor : IBeautyProcessor { mFaceDetector = 
FaceDetector(humanActionNative, effectNative) } - /** - * Release - * - */ override fun release() { isReleased = true mFaceDetector.release() @@ -97,21 +91,10 @@ class BeautyProcessor : IBeautyProcessor { mSTMobileHardwareBufferNative = null } - /** - * Enable sensor - * - * @param context - * @param enable - */ override fun enableSensor(context: Context, enable: Boolean) { mFaceDetector.enableSensor(context, enable) } - /** - * Trigger screen tap - * - * @param isDouble - */ override fun triggerScreenTap(isDouble: Boolean) { LogUtils.d( TAG, @@ -126,24 +109,36 @@ class BeautyProcessor : IBeautyProcessor { } - /** - * Process - * - * @param input - * @return - */ override fun process(input: InputInfo): OutputInfo? { if (isReleased) { return null } return if (input.bytes != null && input.textureId != null) { + if(processMode != ProcessMode.DOUBLE_INPUT){ + processMode = ProcessMode.DOUBLE_INPUT + if (mInputWidth > 0 || mInputHeight > 0) { + skipFrame = 3 + } + } processDoubleInput(input) } else if (input.bytes != null) { + if(processMode != ProcessMode.SINGLE_BYTES_INPUT){ + processMode = ProcessMode.SINGLE_BYTES_INPUT + if (mInputWidth > 0 || mInputHeight > 0) { + skipFrame = 3 + } + } processSingleBytesInput(input) } else if (input.textureId != null && Build.VERSION.SDK_INT >= 26) { + if(processMode != ProcessMode.SINGLE_TEXTURE_INPUT){ + processMode = ProcessMode.SINGLE_TEXTURE_INPUT + if (mInputWidth > 0 || mInputHeight > 0) { + skipFrame = 3 + } + } processSingleTextureInput(input) } else { - null + throw RuntimeException("Single texture input is not supported when SDK_INT < 26!"); } } @@ -165,6 +160,7 @@ class BeautyProcessor : IBeautyProcessor { if (mSTMobileHardwareBufferNative == null) { mProcessWidth = width mProcessHeight = height + glFrameBuffer.resizeTexture(processInTextureId, width, height) mSTMobileHardwareBufferNative = STMobileHardwareBufferNative().apply { init( width, @@ -218,7 +214,7 @@ class BeautyProcessor : IBeautyProcessor { input.isFrontCamera, input.isMirror, input.cameraOrientation, - input.timestamp + input.timestamp, ) ) } @@ -269,7 +265,7 @@ class BeautyProcessor : IBeautyProcessor { input.isFrontCamera, input.isMirror, input.cameraOrientation, - input.timestamp + input.timestamp, ) ) } @@ -281,14 +277,20 @@ class BeautyProcessor : IBeautyProcessor { if (input.bytes == null || input.textureId == null) { return null } - if (mInputWidth != input.width || mInputHeight != input.height || isLastFrontCamera != input.isFrontCamera) { + if (mInputWidth != input.width || mInputHeight != input.height || mInputOrientation != input.cameraOrientation || isLastFrontCamera != input.isFrontCamera) { + if(mInputWidth > 0 || mInputHeight > 0){ + skipFrame = 3 + } mInputWidth = input.width mInputHeight = input.height + mInputOrientation = input.cameraOrientation isLastFrontCamera = input.isFrontCamera reset() return null } + + val diff = glTextureBufferQueue.size() - mFaceDetector.size() if(diff < input.diffBetweenBytesAndTexture){ glTextureBufferQueue.enqueue( @@ -359,6 +361,11 @@ class BeautyProcessor : IBeautyProcessor { ) ) + if(skipFrame > 0){ + skipFrame -- + return null + } + return out } @@ -433,16 +440,23 @@ class BeautyProcessor : IBeautyProcessor { STEffectParam.EFFECT_PARAM_USE_INPUT_TIMESTAMP, 1.0f ) + if (isReleased) { + return -1 + } mSTMobileEffectNative.render( sTEffectRenderInParam, stEffectRenderOutParam, false ) + if (event == mCustomEvent) { mCustomEvent = 0 } + if (isReleased) { + return -1 + } glFrameBuffer.setSize(width, height) 
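// Reviewer note on the skipFrame guards added above: switching the input path
// (double input / single bytes / single texture), or changing the input size,
// orientation, or camera, now primes skipFrame = 3, and process() returns null
// while frames are skipped. Callers should treat a null OutputInfo as "drop this
// frame" rather than as an error; the apparent intent is to avoid a few frames of
// stale or mis-oriented output right after the pipeline is rebuilt.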
glFrameBuffer.resetTransform() glFrameBuffer.setFlipV(true) @@ -455,10 +469,6 @@ class BeautyProcessor : IBeautyProcessor { return finalOutTextureId } - /** - * Reset - * - */ override fun reset() { mFaceDetector.reset() glTextureBufferQueue.reset() @@ -474,7 +484,7 @@ class BeautyProcessor : IBeautyProcessor { private fun getCurrentOrientation(): Int { - val dir = mFaceDetector.getAccelerometer()?.direction ?: ClockwiseAngle.Deg90.value + val dir = mFaceDetector.getAccelerometer()?.direction ?: CLOCKWISE_ANGLE.Deg90.value var orientation = dir - 1 if (orientation < 0) { orientation = dir xor 3 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/FaceDetector.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/FaceDetector.kt index 7c6620f16..32784b124 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/FaceDetector.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/FaceDetector.kt @@ -38,13 +38,6 @@ import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.Executors import java.util.concurrent.Future -/** - * Face detector - * - * @property humanActionNative - * @property effectNative - * @constructor Create empty Face detector - */ class FaceDetector( private val humanActionNative: STMobileHumanActionNative, private val effectNative: STMobileEffectNative @@ -59,12 +52,6 @@ class FaceDetector( private val cacheFutureQueue = ConcurrentLinkedQueue>() private var isDequeBegin = false - /** - * Enable sensor - * - * @param context - * @param enable - */ fun enableSensor(context: Context, enable: Boolean) { if (enable) { if (accelerometer == null) { @@ -80,16 +67,8 @@ class FaceDetector( } } - /** - * Get accelerometer - * - */ fun getAccelerometer() = accelerometer - /** - * Reset - * - */ fun reset() { cacheIndex = 0 isDequeBegin = false @@ -100,22 +79,12 @@ class FaceDetector( } } - /** - * Release - * - */ fun release(){ reset() accelerometer?.stop() workerThread.shutdownNow() } - /** - * Enqueue - * - * @param iN - * @return - */ fun enqueue(iN: DetectorIn): Int { val index = cacheIndex val size = cacheFutureQueue.size @@ -133,11 +102,6 @@ class FaceDetector( return size } - /** - * Dequeue - * - * @return - */ fun dequeue(): DetectorOut? 
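// Reviewer note: in FaceDetector.detectHuman() below, frames at 0/180 degrees now
// mirror/rotate the detection results directly via nativeHumanActionMirrorPtr and
// nativeHumanActionRotatePtr instead of the generic rotateAndMirror helper. This
// appears to pair with the 0/180-degree flip special-casing added in
// GLTextureBufferQueue, keeping the rendered texture and the detected face
// coordinates consistent.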
{ val size = cacheFutureQueue.size if(isDequeBegin || size >= cacheSize){ @@ -157,10 +121,6 @@ class FaceDetector( return null } - /** - * Size - * - */ fun size() = cacheFutureQueue.size private fun detectHuman(iN: DetectorIn, index: Int) { @@ -170,7 +130,7 @@ class FaceDetector( iN.orientation ) val deviceOrientation: Int = - accelerometer?.direction ?: Accelerometer.ClockwiseAngle.Deg90.value + accelerometer?.direction ?: Accelerometer.CLOCKWISE_ANGLE.Deg90.value val ret: Int = humanActionNative.nativeHumanActionDetectPtr( iN.bytes, iN.bytesType, @@ -189,15 +149,26 @@ class FaceDetector( if(iN.isMirror){ mirror = !mirror } - STHumanAction.nativeHumanActionRotateAndMirror( - humanActionNative, - humanActionNative.nativeHumanActionResultPtr, - rotatedSize.width, - rotatedSize.height, - if (mirror) Camera.CameraInfo.CAMERA_FACING_FRONT else Camera.CameraInfo.CAMERA_FACING_BACK, - iN.orientation, - deviceOrientation - ) + if (iN.orientation == 0 || iN.orientation == 180) { + if (mirror) { + humanActionNative.nativeHumanActionMirrorPtr(rotatedSize.width) + } + if(iN.orientation == 180){ + humanActionNative.nativeHumanActionRotatePtr(rotatedSize.width, rotatedSize.height, STRotateType.ST_CLOCKWISE_ROTATE_180, false) + } + } else { + STHumanAction.nativeHumanActionRotateAndMirror( + humanActionNative, + humanActionNative.nativeHumanActionResultPtr, + rotatedSize.width, + rotatedSize.height, + if (mirror) Camera.CameraInfo.CAMERA_FACING_FRONT else Camera.CameraInfo.CAMERA_FACING_BACK, + iN.orientation, + deviceOrientation + ) + } + + humanActionNative.updateNativeHumanActionCache(index) } @@ -209,7 +180,7 @@ class FaceDetector( */ private fun getHumanActionOrientation(frontCamera: Boolean, cameraRotation: Int): Int { //获取重力传感器返回的方向 - var orientation: Int = accelerometer?.direction ?: Accelerometer.ClockwiseAngle.Deg90.value + var orientation: Int = accelerometer?.direction ?: Accelerometer.CLOCKWISE_ANGLE.Deg90.value //在使用后置摄像头,且传感器方向为0或2时,后置摄像头与前置orientation相反 if (!frontCamera && orientation == STRotateType.ST_CLOCKWISE_ROTATE_0) { @@ -229,18 +200,6 @@ class FaceDetector( } - /** - * Detector in - * - * @property bytes - * @property bytesType - * @property width - * @property height - * @property isFront - * @property isMirror - * @property orientation - * @constructor Create empty Detector in - */ data class DetectorIn( val bytes: ByteArray, val bytesType: Int, @@ -248,16 +207,9 @@ class FaceDetector( val height: Int, val isFront: Boolean, val isMirror: Boolean, - val orientation: Int + val orientation: Int, ) - /** - * Detector out - * - * @property humanResult - * @property animalResult - * @constructor Create empty Detector out - */ data class DetectorOut( val humanResult: Long, val animalResult: STMobileAnimalResult? 
= null diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/IBeautyProcessor.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/IBeautyProcessor.kt index 73b979fb7..4cfa22e62 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/IBeautyProcessor.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/IBeautyProcessor.kt @@ -6,23 +6,6 @@ import com.softsugar.stmobile.STCommonNative import com.softsugar.stmobile.STMobileEffectNative import com.softsugar.stmobile.STMobileHumanActionNative -/** - * Input info - * - * @property bytes - * @property bytesType - * @property textureId - * @property textureType - * @property textureMatrix - * @property diffBetweenBytesAndTexture - * @property width - * @property height - * @property isFrontCamera - * @property isMirror - * @property cameraOrientation - * @property timestamp - * @constructor Create empty Input info - */ data class InputInfo( val bytes: ByteArray? = null, val bytesType: Int = STCommonNative.ST_PIX_FMT_NV21, @@ -35,22 +18,10 @@ data class InputInfo( val isFrontCamera: Boolean, val isMirror: Boolean, val cameraOrientation: Int, - val timestamp: Long + val timestamp: Long, ) -/** - * Output info - * - * @property textureId - * @property textureType - * @property width - * @property height - * @property timestamp - * @property errorCode - * @property errorMessage - * @constructor Create empty Output info - */ class OutputInfo( val textureId: Int = 0, val textureType: Int = GLES20.GL_TEXTURE_2D, @@ -58,67 +29,26 @@ class OutputInfo( val height: Int = 0, val timestamp: Long = 0, val errorCode: Int = 0, - val errorMessage: String = "" + val errorMessage: String = "", ) -/** - * I beauty processor - * - * @constructor Create empty I beauty processor - */ interface IBeautyProcessor { - /** - * Initialize - * - * @param effectNative - * @param humanActionNative - */ fun initialize( effectNative: STMobileEffectNative, // 美颜效果处理句柄 - humanActionNative: STMobileHumanActionNative // 人脸检测句柄 + humanActionNative: STMobileHumanActionNative, // 人脸检测句柄 ) - /** - * Process - * - * @param input - * @return - */ fun process(input: InputInfo): OutputInfo? 
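// Reviewer note: besides stripping the generated KDoc, the IBeautyProcessor hunks
// add trailing commas to parameter lists, a Kotlin style choice that keeps later
// diffs to one line per parameter. Typical call flow for the interface, as a
// review-only sketch:
//
//     val processor = createBeautyProcessor()
//     processor.initialize(effectNative, humanActionNative)
//     val out = processor.process(input) ?: return   // null means the frame was dropped
//     // render out.textureId (out.width x out.height) ...
//     processor.release()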
- /** - * Enable sensor - * - * @param context - * @param enable - */ fun enableSensor(context: Context, enable: Boolean) - /** - * Trigger screen tap - * - * @param isDouble - */ fun triggerScreenTap(isDouble: Boolean) - /** - * Reset - * - */ fun reset() - - /** - * Release - * - */ + fun release() } -/** - * Create beauty processor - * - * @return - */ fun createBeautyProcessor(): IBeautyProcessor = BeautyProcessor() \ No newline at end of file
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_beauty_faceunity.xml b/Android/APIExample/app/src/main/res/layout/fragment_beauty_faceunity.xml index cd18a6613..2aa57b690 100644 --- a/Android/APIExample/app/src/main/res/layout/fragment_beauty_faceunity.xml +++ b/Android/APIExample/app/src/main/res/layout/fragment_beauty_faceunity.xml @@ -96,16 +96,6 @@ android:text="@string/sticker" android:textColor="@color/sel_text_yellow_white" /> [XML markup lost in extraction: the hunk removes a ten-line view block after the sticker button] \ No newline at end of file
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_live_streaming.xml b/Android/APIExample/app/src/main/res/layout/fragment_live_streaming.xml index a14219b00..5c73d2126 100644 --- a/Android/APIExample/app/src/main/res/layout/fragment_live_streaming.xml +++ b/Android/APIExample/app/src/main/res/layout/fragment_live_streaming.xml @@ -63,6 +63,14 @@ app:layout_constraintBottom_toTopOf="@id/ll_join" app:layout_constraintStart_toEndOf="@id/btn_publish" /> [XML markup lost in extraction: the hunk adds eight lines of markup after btn_publish] \ No newline at end of file
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_media_metadata.xml b/Android/APIExample/app/src/main/res/layout/fragment_media_metadata.xml new file mode 100644 index 000000000..9a5d34343 --- /dev/null +++ b/Android/APIExample/app/src/main/res/layout/fragment_media_metadata.xml @@ -0,0 +1,93 @@ [new 93-line layout for the media metadata example; XML markup lost in extraction]
diff --git a/Android/APIExample/app/src/main/res/layout/fragment_picture_in_picture.xml b/Android/APIExample/app/src/main/res/layout/fragment_picture_in_picture.xml index 5c896beac..75b72fb89 100644 --- a/Android/APIExample/app/src/main/res/layout/fragment_picture_in_picture.xml +++ b/Android/APIExample/app/src/main/res/layout/fragment_picture_in_picture.xml @@ -1,65 +1,88 @@ [XML markup largely lost in extraction; the surviving attributes (android:layout_height="match_parent", android:orientation="vertical", android:layout_width="0dp", android:layout_weight="1") indicate the root is reworked into a vertical, weighted LinearLayout] [further hunks @@ -156,18 +164,19 @@, @@ -236,7 +278,7 @@ and @@ -50,7 +69,7 @@ follow with their markup lost; the backwards jump in line numbers at @@ -50,7 +69,7 @@ suggests the header of at least one more layout or storyboard file was lost as well]
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift b/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift index 2ffb7b0f0..5dd055894 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift @@ -18,6 +18,7 @@ import AGEVideoLayout class RawAudioDataViewController: BaseViewController { @IBOutlet weak var videoContainer: AGEVideoContainer! + @IBOutlet weak var textfield: UITextField! let localVideo = Bundle.loadVideoView(type: .local, audioOnly: true) let remoteVideo = Bundle.loadVideoView(type: .remote, audioOnly: true) @@ -36,6 +37,9 @@ Util.configPrivatization(agoraKit: agoraKit) agoraKit.setClientRole(GlobalSettings.shared.getUserRole()) + // Audio4 is required to send Audio Meta Data.
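// (Reviewer note: "rtc.use_audio4" is a private engine parameter passed as a JSON
// string through setParameters; per this sample it must be set before joining so
// that sendAudioMetadata / audioMetadataReceived below can work. The macOS
// RawAudioData sample later in this patch sets the same flag.)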
+ agoraKit.setParameters("{\"rtc.use_audio4\":true}") + // Setup raw audio data frame observer agoraKit.setAudioFrameDelegate(self) agoraKit.enableAudio() @@ -49,6 +53,11 @@ }) } + @IBAction func onTapSendButton(_ sender: Any) { + guard let data = textfield.text?.data(using: .utf8) else { return } + agoraKit.sendAudioMetadata(data) + textfield.text = "" + } override func didMove(toParent parent: UIViewController?) { if parent == nil { agoraKit.setAudioFrameDelegate(nil) @@ -116,6 +125,10 @@ extension RawAudioDataViewController: AgoraAudioFrameDelegate { // MARK: - AgoraRtcEngineDelegate extension RawAudioDataViewController: AgoraRtcEngineDelegate { + func rtcEngine(_ engine: AgoraRtcEngineKit, audioMetadataReceived uid: UInt, metadata: Data) { + let data = String(data: metadata, encoding: .utf8) ?? "" + ToastView.show(text: "uid: \(uid) data: \(data)") + } // en: https://api-ref.agora.io/en/video-sdk/ios/4.x/documentation/agorartckit/agoraerrorcode // cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) {
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/zh-Hans.lproj/RawAudioData.strings b/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/zh-Hans.lproj/RawAudioData.strings index 12cb6b060..778452e01 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/zh-Hans.lproj/RawAudioData.strings +++ b/iOS/APIExample/APIExample/Examples/Advanced/RawAudioData/zh-Hans.lproj/RawAudioData.strings @@ -4,3 +4,6 @@ /* Class = "UIButton"; normalTitle = "Join"; ObjectID = "yhH-81-equ"; */ "yhH-81-equ.normalTitle" = "加入频道"; + +"kTh-3L-D7c" = "发送"; +"DPe-Im-fsd.placeholder" = "请输入数据";
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard index 6da1e8e03..acc067f7f 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/Base.lproj/ScreenShare.storyboard [storyboard XML markup lost in extraction: three single-line substitutions at @@ -1,9 +1,9 @@, @@ -20,7 +20,7 @@ and @@ -278,7 +278,7 @@]
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/SpatialAudio/SpatialAudio.swift b/iOS/APIExample/APIExample/Examples/Advanced/SpatialAudio/SpatialAudio.swift index 04032ae75..b883e73d8 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/SpatialAudio/SpatialAudio.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/SpatialAudio/SpatialAudio.swift @@ -81,13 +81,13 @@ class SpatialAudioMain: BaseViewController { super.viewDidAppear(animated) guard isJoined == false else { return } mediaPlayer1 = agoraKit.createMediaPlayer(with: self) - mediaPlayer1.setLoopCount(10000) + mediaPlayer1.setLoopCount(-1) mediaPlayer1.open("https://webdemo.agora.io/audiomixing.mp3", startPos: 0) localSpatial.updatePlayerPositionInfo(Int(mediaPlayer1.getMediaPlayerId()), positionInfo: getPlayerPostion(view: voiceButton1)) localSpatial.setPlayerAttenuation(0.2, playerId: UInt(mediaPlayer1.getMediaPlayerId()), forceSet: false) mediaPlayer2 = agoraKit.createMediaPlayer(with: self) - mediaPlayer2.setLoopCount(10000) + mediaPlayer2.setLoopCount(-1) mediaPlayer2.open("https://webdemo.agora.io/dang.mp3", startPos: 0) localSpatial.updatePlayerPositionInfo(Int(mediaPlayer2.getMediaPlayerId()), positionInfo: getPlayerPostion(view: voiceButton2)) localSpatial.setPlayerAttenuation(0.2,
playerId: UInt(mediaPlayer2.getMediaPlayerId()), forceSet: false) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift b/iOS/APIExample/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift index 79434f112..61287da6d 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift @@ -184,7 +184,11 @@ class StreamEncryptionMain: BaseViewController { } func getEncryptionSaltFromServer() -> Data { - return "EncryptionKdfSaltInBase64Strings".data(using: .utf8) ?? Data() + // Salt string should be the output of the following command: + // openssl rand -base64 32 + let saltBase64String = "NiIeJ08AbtcQVjvV+oOEvF/4Dz5dy1CIwa805C8J2w0=" + let data = Data(base64Encoded: saltBase64String.data(using: .utf8) ?? Data()) + return data ?? Data() } override func willMove(toParent parent: UIViewController?) { diff --git a/iOS/APIExample/APIExample/Info.plist b/iOS/APIExample/APIExample/Info.plist index 448b7a5ce..989504b53 100644 --- a/iOS/APIExample/APIExample/Info.plist +++ b/iOS/APIExample/APIExample/Info.plist @@ -43,8 +43,10 @@ Request Camera Access NSMicrophoneUsageDescription Request Mic Access - UIFileSharingEnabled - + UIBackgroundModes + + audio + UILaunchStoryboardName LaunchScreen UIMainStoryboardFile diff --git a/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings b/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings index 554f13bf4..d994de594 100644 --- a/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings +++ b/iOS/APIExample/APIExample/zh-Hans.lproj/Localizable.strings @@ -175,3 +175,11 @@ "ULTRALOWLATENCY" = "低延时强降噪模式"; "Face Capture" = "面捕数据"; "Please contact Agora customer service to obtain a face capture certificate" = "请联系声网客服获取面捕证书"; +"Second camera" = "第二路摄像头"; +"This device does not support Center Stage" = "此设备不支持 Center Stage"; +"Wide" = "广角"; +"Length Wide" = "超广角"; +"Telephoto" = "长焦"; +"Front camera" = "前置"; +"Rear camera" = "后置"; +"The camera has changed:" = "摄像头已改变:"; diff --git a/iOS/APIExample/Podfile b/iOS/APIExample/Podfile index 67c3f92ff..70809019b 100644 --- a/iOS/APIExample/Podfile +++ b/iOS/APIExample/Podfile @@ -1,4 +1,4 @@ -Podfile# Uncomment the next line to define a global platform for your project +# Uncomment the next line to define a global platform for your project # platform :ios, '9.0' #source 'https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git' @@ -10,8 +10,8 @@ target 'APIExample' do pod 'CocoaAsyncSocket', '7.6.5' pod 'ijkplayer', '~> 1.1.3' pod 'SwiftLint', '~> 0.53.0' - pod 'AgoraRtcEngine_iOS', '4.3.0' - # pod 'sdk', :path => 'sdk.podspec' + pod 'AgoraRtcEngine_iOS', '4.3.1' +# pod 'sdk', :path => 'sdk.podspec' # pod 'senseLib', :path => 'sense.podspec' # pod 'bytedEffect', :path => 'bytedEffect.podspec' # pod 'fuLib', :path => 'fu.podspec' @@ -19,12 +19,16 @@ end target 'Agora-ScreenShare-Extension' do use_frameworks! - # pod 'sdk', :path => 'sdk.podspec' -pod 'AgoraRtcEngine_iOS', '4.3.0' +# pod 'sdk', :path => 'sdk.podspec' + pod 'AgoraRtcEngine_iOS', '4.3.1' end target 'SimpleFilter' do use_frameworks! 
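# (Reviewer note: all three targets in this Podfile, APIExample,
# Agora-ScreenShare-Extension and SimpleFilter, are bumped from
# AgoraRtcEngine_iOS 4.3.0 to 4.3.1 in lockstep; keeping the app and its
# extension targets on one SDK version helps avoid mixed-version framework
# clashes at link time.)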
- # pod 'sdk', :path => 'sdk.podspec' - pod 'AgoraRtcEngine_iOS', '4.3.0' +# pod 'sdk', :path => 'sdk.podspec' + pod 'AgoraRtcEngine_iOS', '4.3.1' +end + +pre_install do |installer| +# system("sh .download_script.sh 4.3.1 true") end diff --git a/iOS/APIExample/ijkplayer.podspec b/iOS/APIExample/ijkplayer.podspec new file mode 100644 index 000000000..540ae72c6 --- /dev/null +++ b/iOS/APIExample/ijkplayer.podspec @@ -0,0 +1,34 @@ +Pod::Spec.new do |s| + s.name = "ijkplayer" + s.version = "1.1.3" + s.summary = "ijkplayer framework." + + s.description = <<-DESC +bilibili/ijkplayer k0.8.8 IJKMediaFramework 上传到 cococapods + DESC + + s.homepage = "https://github.com/iOSDevLog/ijkplayer" + + s.license = { :type => "GNU Lesser General Public License v2.1", :text => <<-LICENSE + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + https://github.com/iOSDevLog/ijkplayer/raw/master/LICENSE + LICENSE + } + + s.author = { "iosdevlog" => "iosdevlog@iosdevlog.com" } + s.social_media_url = "http://weibo.com/iOSDevLog" + + s.platform = :ios, "7.0" + + s.source = { :http => "" } + # s.source = { :http => "https://github.com/iOSDevLog/ijkplayer/releases/download/#{s.version}/IJKMediaFramework.framework.zip" } + + s.vendored_frameworks = 'IJKMediaFramework.framework' + + s.frameworks = "AudioToolbox", "AVFoundation", "CoreGraphics", "CoreMedia", "CoreVideo", "MobileCoreServices", "OpenGLES", "QuartzCore", "VideoToolbox", "Foundation", "UIKit", "MediaPlayer" + s.libraries = "bz2", "z", "stdc++" + + s.requires_arc = true + +end diff --git a/iOS/APIExample/package_script.sh b/iOS/APIExample/package_script.sh new file mode 100755 index 000000000..f67d289bd --- /dev/null +++ b/iOS/APIExample/package_script.sh @@ -0,0 +1,15 @@ + + +http_proxy=http://10.10.114.51:1080 https_proxy=http://10.10.114.51:1080 pod install --repo-update +if [ $? -eq 0 ]; then + echo "pod success" +else + echo "pod failed" + exit 1 +fi + +script_path="$( cd "$(dirname "$0")" ; pwd -P )" +echo $script_path +current_datetime=$(date +'%Y-%m-%d_%H-%M-%S') +echo $current_datetime +zip -r ${1}_Swift_${current_datetime}.zip . \ No newline at end of file diff --git a/macOS/.download_script.sh b/macOS/.download_script.sh new file mode 100755 index 000000000..dbd4a6363 --- /dev/null +++ b/macOS/.download_script.sh @@ -0,0 +1,25 @@ +#!/bin/bash +version=$1 +url="https://download.agora.io/sdk/release/Agora_Native_SDK_for_Mac_v${version}_FULL.zip" +zip_filename=$(basename "$url") + +if [ -d "libs" ] && [ "$2" = "false" ]; then + echo "Folder exists" + exit; +fi + +rm -rf libs + +# 使用curl命令下载文件 +echo 'download Agora RTC SDK...' +curl -o "$zip_filename" "$url" + +unzip $zip_filename + +folder_name=$(unzip -qql $zip_filename | head -n1 | tr -s ' ' | cut -d' ' -f5-) # 获取文件夹名称 +mv "${folder_name}libs" . + +rm -rf $zip_filename +rm -rf $folder_name + +echo "download sdk finished!" \ No newline at end of file diff --git a/macOS/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift b/macOS/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift index 996f7f970..c930f92df 100644 --- a/macOS/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift +++ b/macOS/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift @@ -193,6 +193,7 @@ class FaceCaptureMain: BaseViewController { channelField.isEnabled = !isJoined initJoinChannelButton() agoraKit.setVideoFrameDelegate(isJoined ? self : nil) + agoraKit.setFaceInfoDelegate(isJoined ? 
self : nil) } } @@ -229,6 +230,16 @@ class FaceCaptureMain: BaseViewController { value: "{\"company_id\":\"agoraTest\"," + "\"license\":\"" + (KeyCenter.FaceCaptureLicense ?? "") + "\"}", sourceType: .primaryCamera) + + agoraKit.enableExtension(withVendor: "agora_filters_lip_sync", + extension: "lip_sync", + enabled: true, + sourceType: .speechDriven) + agoraKit.setExtensionPropertyWithVendor("agora_filters_lip_sync", + extension: "lip_sync", + key: "parameters", + value: "{\"company_id\":\"agoraTest\", \"license\":\"abc\", \"open_agc\":true}", + sourceType: .speechDriven) } initSelectCameraPicker() @@ -356,10 +367,17 @@ class FaceCaptureMain: BaseViewController { } } +extension FaceCaptureMain: AgoraFaceInfoDelegate { + func onFaceInfo(_ outFaceInfo: String) -> Bool { + videos[0].statsInfo?.updateMetaInfo(data: outFaceInfo) + return true + } +} + extension FaceCaptureMain: AgoraVideoFrameDelegate { func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { let info = videoFrame.metaInfo["KEY_FACE_CAPTURE"] as? String - videos[0].statsInfo?.updateMetaInfo(data: info) + LogUtils.log(message: info ?? "", level: .info) return true } func getVideoFrameProcessMode() -> AgoraVideoFrameProcessMode {
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard b/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard index 8791fa9bb..3d12b4a0c 100644 --- a/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard +++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard [storyboard XML markup lost in extraction; judging from the LiveStreaming.swift changes below, the hunks restructure the scene around a new scroll view and move the function controls into a separate LiveStreamingRTCFunctionVC scene]
diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift b/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift index 232a42e07..87d3e9c1f 100644 --- a/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift +++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift @@ -10,11 +10,11 @@ import AgoraRtcKit import AGEVideoLayout class LiveStreamingMain: BaseViewController { - + @IBOutlet weak var scrollView: NSScrollView! var agoraKit: AgoraRtcEngineKit!
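// (Reviewer note: the LiveStreaming refactor below moves every function control,
// snapshot, watermark, dual stream, first frame, video image, B-frame and encoder
// segment, out of LiveStreamingMain into the new LiveStreamingRTCFunctionVC, which
// reports UI events back through closures wired up in prepare(for:sender:), while
// LiveStreamingMain keeps the pickers and the new scrollView hosting the child.)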
var remoteUid: UInt = 0 { didSet { - snapShot.isEnabled = remoteUid != 0 + functionVC?.snapShot.isEnabled = remoteUid != 0 } } var isPreloadChannel: Bool = false { @@ -237,6 +237,7 @@ class LiveStreamingMain: BaseViewController { return nil } } + func initSelectRolePicker() { selectRolePicker.label.stringValue = "Role".localized selectRolePicker.picker.addItems(withTitles: roles.map { $0.description() }) @@ -251,103 +252,31 @@ class LiveStreamingMain: BaseViewController { self.agoraKit.setClientRole(selected) _ = selected == .broadcaster ? self.agoraKit.startPreview() : self.agoraKit.stopPreview() } - self.waterMarkContainer.isHidden = selected == .audience - self.bFrameContainer.isHidden = selected == .audience - self.encoderSegment.isHidden = selected == .audience - self.videoImageContainerView.isHidden = selected == .audience + self.functionVC?.waterMarkContainer.isHidden = selected == .audience + self.functionVC?.bFrameContainer.isHidden = selected == .audience + self.functionVC?.encoderSegment.isHidden = selected == .audience + self.functionVC?.videoImageContainerView.isHidden = selected == .audience } } - @IBOutlet weak var snapShot: NSButton! - @IBAction func onTakeSnapshot(_ sender: Any) { - let filePath = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask).first?.absoluteString - let programPath = filePath?.components(separatedBy: "/")[4] ?? "" - let path = "/Users/\(programPath)/Downloads/1.png" - agoraKit.takeSnapshot(Int(remoteUid), filePath: path) - } - - @IBOutlet weak var waterMarkContainer: NSView! - @IBAction func onWaterMark(_ sender: NSSwitch) { - if sender.state == .on { - if let filepath = Bundle.main.path(forResource: "agora-logo", ofType: "png") { - if let url = URL(string: filepath) { - let waterMark = WatermarkOptions() - waterMark.visibleInPreview = true - let localVideo = self.videos[0] - waterMark.positionInPortraitMode = localVideo.frame.offsetBy(dx: 20, dy: 20) - waterMark.positionInLandscapeMode = localVideo.frame.offsetBy(dx: 20, dy: 20) - agoraKit.addVideoWatermark(url, options: waterMark) - } - } - } else { - agoraKit.clearVideoWatermarks() - } - } - - @IBOutlet weak var dualStreamTips: NSTextField! - @IBAction func onDualStreaming(_ sender: NSSwitch) { - dualStreamTips.stringValue = sender.state == .on ? "Opening".localized : "(Default: flow)".localized - agoraKit.setDualStreamMode(sender.state == .on ? .enableSimulcastStream : .disableSimulcastStream) - } - - @IBOutlet weak var firstFrameSwitch: NSSwitch! - @IBAction func onFirstFrameSwitch(_ sender: NSSwitch) { - if sender.state == .on { - let alertVC = NSAlert() - alertVC.alertStyle = .critical - alertVC.addButton(withTitle: "Sure".localized) - alertVC.addButton(withTitle: "Cancel".localized) - alertVC.messageText = "After this function is enabled, it cannot be disabled and takes effect only when both the primary and secondary ends are enabled".localized - let response = alertVC.runModal() - if response == .alertFirstButtonReturn { - sender.isEnabled = false - agoraKit.enableInstantMediaRendering() - agoraKit.startMediaRenderingTracing() - } - if response == .alertSecondButtonReturn { - sender.state = .off - } - } - } - - @IBOutlet weak var videoImageSwitch: NSSwitch! - @IBOutlet weak var videoImageContainerView: NSView! 
- @IBAction func onTapVideoImageSwitch(_ sender: NSSwitch) { - let options = AgoraImageTrackOptions() - if let resourcePath = Bundle.main.resourcePath { - let imgPath = resourcePath + "/" + "bg.jpg" - options.imageUrl = imgPath - } - agoraKit.enableVideoImageSource(sender.state == .on, options: options) - } - - @IBOutlet weak var bFrameContainer: NSView! - @IBAction func bFrameSwitch(_ sender: NSSwitch) { - let encoderConfig = AgoraVideoEncoderConfiguration() - let videoOptions = AgoraAdvancedVideoOptions() - videoOptions.compressionPreference = sender.state == .on ? .quality : .lowLatency - encoderConfig.advancedVideoOptions = videoOptions - agoraKit.setVideoEncoderConfiguration(encoderConfig) - } - - @IBOutlet weak var encoderSegment: NSSegmentedControl! - @IBAction func onTapEncoderSegment(_ sender: NSSegmentedControl) { - let encoderConfig = AgoraVideoEncoderConfiguration() - let advancedOptions = AgoraAdvancedVideoOptions() - print(sender.indexOfSelectedItem) - switch sender.indexOfSelectedItem { - case 0: - advancedOptions.encodingPreference = .preferAuto - - case 1: - advancedOptions.encodingPreference = .prefersoftware - - case 2: - advancedOptions.encodingPreference = .preferhardware - - default: break + + @IBOutlet weak var centerStage: Picker! + func initSelectCenterStagePicker() { + centerStage.isEnabled = agoraKit.isCameraCenterStageSupported() + let params: [String: AgoraCameraStabilizationMode] = ["auto": .auto, + "level1": .level1, + "level2": .level2, + "level3": .level3, + "off": .off] + let datas = params.map { $0.key }.sorted() + centerStage.label.stringValue = "Center Stage".localized + centerStage.picker.addItems(withTitles: datas) + centerStage.onSelectChanged { [weak self] in + guard let self = self else { return } + // read the Center Stage picker itself, not the role picker + let index = self.centerStage.indexOfSelectedItem + let key = datas[index] + let mode = params[key] + self.agoraKit.enableCameraCenterStage(mode != .off) } } /** @@ -372,8 +301,8 @@ class LiveStreamingMain: BaseViewController { didSet { channelField.isEnabled = !isJoined selectLayoutPicker.isEnabled = !isJoined - firstFrameSwitch.isEnabled = !isJoined - videoImageSwitch.isEnabled = isJoined + functionVC?.firstFrameSwitch.isEnabled = !isJoined + functionVC?.videoImageSwitch.isEnabled = isJoined initJoinChannelButton() } } @@ -397,12 +326,15 @@ class LiveStreamingMain: BaseViewController { Util.configPrivatization(agoraKit: agoraKit) agoraKit.enableVideo() + scrollView.documentView?.setFrameSize(CGSizeMake(314, 645)) + initSelectCameraPicker() initSelectResolutionPicker() initSelectFpsPicker() initSelectMicsPicker() initSelectLayoutPicker() initSelectRolePicker() + initSelectCenterStagePicker() initChannelField() initJoinChannelButton() initSelectCanvasColor() @@ -536,6 +468,155 @@ } AgoraRtcEngineKit.destroy() } + private var functionVC: LiveStreamingRTCFunctionVC? + override func prepare(for segue: NSStoryboardSegue, sender: Any?) { + functionVC = segue.destinationController as?
LiveStreamingRTCFunctionVC + functionVC?.clickTakeSnapshotClosure = { [weak self] in + self?.onTakeSnapshot() + } + functionVC?.clickWaterMarkClosure = { [weak self] isOn in + self?.onWaterMark(isOn) + } + functionVC?.clickDualStreamingClosure = { [weak self] isOn in + self?.onDualStreaming(isOn) + } + functionVC?.clickFirstFrameSwitch = { [weak self] s in + self?.onFirstFrameSwitch(s) + } + functionVC?.clickVideoImageSwitch = { [weak self] s in + self?.onTapVideoImageSwitch(s) + } + functionVC?.clickBFrameSwitch = { [weak self] s in + self?.bFrameSwitch(s) + } + functionVC?.clickEncoderSegmentSwitch = { [weak self] s in + self?.onTapEncoderSegment(s) + } + } + + private func onTakeSnapshot() { + let filePath = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask).first?.absoluteString + let programPath = filePath?.components(separatedBy: "/")[4] ?? "" + let path = "/Users/\(programPath)/Downloads/1.png" + agoraKit.takeSnapshot(Int(remoteUid), filePath: path) + } + + private func onWaterMark(_ isOn: Bool) { + if isOn { + if let filepath = Bundle.main.path(forResource: "agora-logo", ofType: "png") { + if let url = URL(string: filepath) { + let waterMark = WatermarkOptions() + waterMark.visibleInPreview = true + let localVideo = self.videos[0] + waterMark.positionInPortraitMode = localVideo.frame.offsetBy(dx: 20, dy: 20) + waterMark.positionInLandscapeMode = localVideo.frame.offsetBy(dx: 20, dy: 20) + agoraKit.addVideoWatermark(url, options: waterMark) + } + } + } else { + agoraKit.clearVideoWatermarks() + } + } + private func onDualStreaming(_ isOn: Bool) { + agoraKit.setDualStreamMode(isOn ? .enableSimulcastStream : .disableSimulcastStream) + } + private func onFirstFrameSwitch(_ sender: NSSwitch) { + if sender.state == .on { + let alertVC = NSAlert() + alertVC.alertStyle = .critical + alertVC.addButton(withTitle: "Sure".localized) + alertVC.addButton(withTitle: "Cancel".localized) + alertVC.messageText = "After this function is enabled, it cannot be disabled and takes effect only when both the primary and secondary ends are enabled".localized + let response = alertVC.runModal() + if response == .alertFirstButtonReturn { + sender.isEnabled = false + agoraKit.enableInstantMediaRendering() + agoraKit.startMediaRenderingTracing() + } + if response == .alertSecondButtonReturn { + sender.state = .off + } + } + } + private func onTapVideoImageSwitch(_ sender: NSSwitch) { + let options = AgoraImageTrackOptions() + if let resourcePath = Bundle.main.resourcePath { + let imgPath = resourcePath + "/" + "bg.jpg" + options.imageUrl = imgPath + } + agoraKit.enableVideoImageSource(sender.state == .on, options: options) + } + private func bFrameSwitch(_ sender: NSSwitch) { + let encoderConfig = AgoraVideoEncoderConfiguration() + let videoOptions = AgoraAdvancedVideoOptions() + videoOptions.compressionPreference = sender.state == .on ? 
.quality : .lowLatency + encoderConfig.advancedVideoOptions = videoOptions + agoraKit.setVideoEncoderConfiguration(encoderConfig) + } + private func onTapEncoderSegment(_ sender: NSSegmentedControl) { + let encoderConfig = AgoraVideoEncoderConfiguration() + let advancedOptions = AgoraAdvancedVideoOptions() + print(sender.indexOfSelectedItem) + switch sender.indexOfSelectedItem { + case 0: + advancedOptions.encodingPreference = .preferAuto + + case 1: + advancedOptions.encodingPreference = .prefersoftware + + case 2: + advancedOptions.encodingPreference = .preferhardware + + default: break + } + encoderConfig.advancedVideoOptions = advancedOptions + agoraKit.setVideoEncoderConfiguration(encoderConfig) + } +} + +class LiveStreamingRTCFunctionVC: BaseViewController { + var clickTakeSnapshotClosure: (() -> Void)? + var clickWaterMarkClosure: ((Bool) -> Void)? + var clickDualStreamingClosure: ((Bool) -> Void)? + var clickFirstFrameSwitch: ((NSSwitch) -> Void)? + var clickVideoImageSwitch: ((NSSwitch) -> Void)? + var clickBFrameSwitch: ((NSSwitch) -> Void)? + var clickEncoderSegmentSwitch: ((NSSegmentedControl) -> Void)? + @IBOutlet weak var snapShot: NSButton! + @IBAction func onTakeSnapshot(_ sender: Any) { + clickTakeSnapshotClosure?() + } + + @IBOutlet weak var waterMarkContainer: NSView! + @IBAction func onWaterMark(_ sender: NSSwitch) { + clickWaterMarkClosure?(sender.state == .on) + } + + @IBOutlet weak var dualStreamTips: NSTextField! + @IBAction func onDualStreaming(_ sender: NSSwitch) { + dualStreamTips.stringValue = sender.state == .on ? "Opening".localized : "(Default: flow)".localized + clickDualStreamingClosure?(sender.state == .on) + } + @IBOutlet weak var firstFrameSwitch: NSSwitch! + @IBAction func onFirstFrameSwitch(_ sender: NSSwitch) { + clickFirstFrameSwitch?(sender) + } + + @IBOutlet weak var videoImageSwitch: NSSwitch! + @IBOutlet weak var videoImageContainerView: NSView! + @IBAction func onTapVideoImageSwitch(_ sender: NSSwitch) { + clickVideoImageSwitch?(sender) + } + + @IBOutlet weak var bFrameContainer: NSView! + @IBAction func bFrameSwitch(_ sender: NSSwitch) { + clickBFrameSwitch?(sender) + } + + @IBOutlet weak var encoderSegment: NSSegmentedControl! 
+    @IBAction func onTapEncoderSegment(_ sender: NSSegmentedControl) { +        clickEncoderSegmentSwitch?(sender) +    } } /// agora rtc engine delegate events diff --git a/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings b/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings index 3ba4a62e4..764753351 100644 --- a/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings +++ b/macOS/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings @@ -1,40 +1,40 @@ /* Class = "NSTextFieldCell"; title = "water"; ObjectID = "8YN-Yd-UZv"; */ -"8YN-Yd-UZv.title" = "水印"; +"Srd-aW-lWt.title" = "水印"; /* Class = "NSBox"; title = "Box"; ObjectID = "BP9-4w-AfJ"; */ "BP9-4w-AfJ.title" = "Box"; /* Class = "NSSegmentedCell"; D2B-fw-Vnp.ibShadowedLabels[0] = "Auto"; ObjectID = "D2B-fw-Vnp"; */ -"D2B-fw-Vnp.ibShadowedLabels[0]" = "自动"; +"2nu-cG-poA.ibShadowedLabels[0]" = "自动"; /* Class = "NSSegmentedCell"; D2B-fw-Vnp.ibShadowedLabels[1] = "Soft knitting"; ObjectID = "D2B-fw-Vnp"; */ -"D2B-fw-Vnp.ibShadowedLabels[1]" = "软编"; +"2nu-cG-poA.ibShadowedLabels[1]" = "软编"; /* Class = "NSSegmentedCell"; D2B-fw-Vnp.ibShadowedLabels[2] = "Hard knitting"; ObjectID = "D2B-fw-Vnp"; */ -"D2B-fw-Vnp.ibShadowedLabels[2]" = "硬编"; +"2nu-cG-poA.ibShadowedLabels[2]" = "硬编"; /* Class = "NSButtonCell"; title = "Join"; ObjectID = "Lhu-U1-6qh"; */ "Lhu-U1-6qh.title" = "加入频道"; /* Class = "NSTextFieldCell"; title = "first frame is drawn"; ObjectID = "PDX-e5-ZpY"; */ -"PDX-e5-ZpY.title" = "首帧出图"; +"ZlG-nb-3NY.title" = "首帧出图"; /* Class = "NSTextFieldCell"; title = "S or l flow"; ObjectID = "XH3-Ib-cXr"; */ -"XH3-Ib-cXr.title" = "大小流"; +"zOU-d9-jDn.title" = "大小流"; /* Class = "NSViewController"; title = "Join Channel Video"; ObjectID = "YjT-yy-DnJ"; */ "YjT-yy-DnJ.title" = "实时视频通话/直播"; /* Class = "NSButtonCell"; title = "Snap Shot"; ObjectID = "cDh-B1-x3E"; */ -"cDh-B1-x3E.title" = "截图"; +"hca-Of-3bM.title" = "截图"; /* Class = "NSTextFieldCell"; title = "(Default: flow)"; ObjectID = "d9V-RQ-OX6"; */ -"d9V-RQ-OX6.title" = "(默认: 大流)"; +"G8a-XO-yaN.title" = "(默认: 大流)"; /* Class = "NSTextFieldCell"; title = "BF"; ObjectID = "gt3-r0-jqt"; */ -"gt3-r0-jqt.title" = "B帧"; +"Prc-Ti-Ayl.title" = "B帧"; "gCs-hv-sr4.title" = "预加载"; -"vMN-rl-SRy.title" = "垫片推流"; +"gBn-zJ-ZES.title" = "垫片推流"; diff --git a/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.storyboard b/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.storyboard index bb9766368..c251da453 100644 --- a/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.storyboard +++ b/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.storyboard [storyboard hunks omitted: the XML element markup was stripped during extraction; the change bumps the document version and adds the metadata text field and Send button that back the textField outlet and onClickSendButton action in RawAudioData.swift below] diff --git a/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift b/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift index 11cf0fd15..9329463a3 100644 --- a/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift +++ b/macOS/APIExample/Examples/Advanced/RawAudioData/RawAudioData.swift @@ -17,6 +17,7 @@ class RawAudioData: BaseViewController { @IBOutlet weak var Container: AGEVideoContainer! +    @IBOutlet weak var textField: NSTextField!
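+    // Text input for the audio metadata payload; enabled only while joined and sent with sendAudioMetadata from onClickSendButton below.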
/** --- Microphones Picker --- */ @@ -104,6 +105,7 @@ class RawAudioData: BaseViewController { didSet { channelField.isEnabled = !isJoined selectLayoutPicker.isEnabled = !isJoined + textField.isEnabled = isJoined initJoinChannelButton() } } @@ -122,6 +124,8 @@ class RawAudioData: BaseViewController { config.appId = KeyCenter.AppId config.areaCode = GlobalSettings.shared.area agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self) + // Audio4 is required to send Audio Meta Data. + agoraKit.setParameters("{\"rtc.use_audio4\":true}") // Configuring Privatization Parameters Util.configPrivatization(agoraKit: agoraKit) initSelectMicsPicker() @@ -141,6 +145,12 @@ class RawAudioData: BaseViewController { AgoraRtcEngineKit.destroy() } + @IBAction func onClickSendButton(_ sender: Any) { + guard let data = textField.stringValue.data(using: .utf8) else { return } + agoraKit.sendAudioMetadata(data) + textField.stringValue = "" + } + @IBAction func onJoinPressed(_ sender:Any) { if !isJoined { // check configuration @@ -223,6 +233,10 @@ class RawAudioData: BaseViewController { /// agora rtc engine delegate events extension RawAudioData: AgoraRtcEngineDelegate { + func rtcEngine(_ engine: AgoraRtcEngineKit, audioMetadataReceived uid: UInt, metadata: Data) { + let data = String(data: metadata, encoding: .utf8) ?? "" + showAlert(message: data) + } /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out /// what is happening /// Warning code description can be found at: diff --git a/macOS/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift b/macOS/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift index e465961cb..c3a0b3094 100644 --- a/macOS/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift +++ b/macOS/APIExample/Examples/Advanced/StreamEncryption/StreamEncryption.swift @@ -315,8 +315,10 @@ class StreamEncryption: BaseViewController { } func getEncryptionSaltFromServer() -> Data { - - return "EncryptionKdfSaltInBase64Strings".data(using: .utf8)! + // Salt string should be the output of the following command: + // openssl rand -base64 32 + let saltBase64String = "NiIeJ08AbtcQVjvV+oOEvF/4Dz5dy1CIwa805C8J2w0=" + return Data(base64Encoded: saltBase64String.data(using: .utf8)!)! } func layoutVideos(_ count: Int) { diff --git a/macOS/Podfile b/macOS/Podfile index 854a851a7..fa1058f2f 100644 --- a/macOS/Podfile +++ b/macOS/Podfile @@ -4,18 +4,19 @@ target 'APIExample' do use_frameworks! pod 'AGEVideoLayout', '1.0.2' - pod 'AgoraRtcEngine_macOS', '4.3.0' - # pod 'sdk', :path => 'sdk.podspec' + pod 'AgoraRtcEngine_macOS', '4.3.1' + # pod 'sdk', :path => 'sdk.podspec' end target 'SimpleFilter' do use_frameworks! - # pod 'sdk', :path => 'sdk.podspec' - pod 'AgoraRtcEngine_macOS', '4.3.0' + # pod 'sdk', :path => 'sdk.podspec' + pod 'AgoraRtcEngine_macOS', '4.3.1' end post_install do |installer| + # system("sh .download_script.sh 4.3.1 true") installer.pods_project.targets.each do |target| target.build_configurations.each do |config| config.build_settings['MACOSX_DEPLOYMENT_TARGET'] = '10.13' diff --git a/macOS/package_script.sh b/macOS/package_script.sh new file mode 100755 index 000000000..e36ce8d9c --- /dev/null +++ b/macOS/package_script.sh @@ -0,0 +1,17 @@ + + +http_proxy=http://10.10.114.51:1080 https_proxy=http://10.10.114.51:1080 pod install --repo-update +if [ $? 
-eq 0 ]; then + echo "pod success" +else + echo "pod failed" + exit 1 +fi + +script_path="$( cd "$(dirname "$0")" ; pwd -P )" +echo $script_path +current_datetime=$(date +'%Y-%m-%d_%H-%M-%S') +echo $current_datetime +project_name=$(echo $script_path | rev | cut -d'/' -f1 | rev) +echo $project_name +zip -ry ${1}_${project_name}_${current_datetime}.zip . \ No newline at end of file diff --git a/windows/APIExample/APIExample/APIExample.rc b/windows/APIExample/APIExample/APIExample.rc index e5662e922..bad29a8a9 100755 --- a/windows/APIExample/APIExample/APIExample.rc +++ b/windows/APIExample/APIExample/APIExample.rc @@ -156,15 +156,16 @@ FONT 8, "MS Shell Dlg", 400, 0, 0x1 BEGIN LTEXT "",IDC_STATIC_VIDEO,1,0,483,310,NOT WS_VISIBLE LISTBOX IDC_LIST_INFO_BROADCASTING,491,0,139,312,LBS_NOINTEGRALHEIGHT | LBS_DISABLENOSCROLL | WS_VSCROLL | WS_HSCROLL | WS_TABSTOP - LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,11,328,48,8 - EDITTEXT IDC_EDIT_CHANNELNAME,71,326,242,15,ES_AUTOHSCROLL - PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,322,326,50,14 - LTEXT "Send SEI",IDC_STATIC_SENDSEI,11,353,48,8 - EDITTEXT IDC_EDIT_SEI,71,351,244,21,ES_MULTILINE | ES_AUTOVSCROLL - PUSHBUTTON "Send",IDC_BUTTON_SEND,325,350,50,14 - EDITTEXT IDC_EDIT_RECV,11,377,419,20,ES_MULTILINE | WS_DISABLED + LTEXT "Channel Name",IDC_STATIC_CHANNELNAME,14,327,48,8 + EDITTEXT IDC_EDIT_CHANNELNAME,71,324,242,15,ES_AUTOHSCROLL + PUSHBUTTON "JoinChannel",IDC_BUTTON_JOINCHANNEL,326,324,50,14 + LTEXT "Send SEI",IDC_STATIC_VIDEO_METADATA,14,350,48,8 + EDITTEXT IDC_EDIT_AUDIO_METADATA,71,370,244,14,ES_MULTILINE | ES_AUTOVSCROLL + PUSHBUTTON "Send",IDC_BUTTON_SEND_VIDEO_METADATA,326,347,50,14 LTEXT "",IDC_STATIC_METADATA_INFO,493,321,137,16 - PUSHBUTTON "Clear",IDC_BUTTON_CLEAR,385,351,50,14 + LTEXT "Send SEI",IDC_STATIC_AUDIO_METADATA,14,373,48,8 + EDITTEXT IDC_EDIT_VIDEO_METADATA,71,347,244,14,ES_MULTILINE | ES_AUTOVSCROLL + PUSHBUTTON "Send",IDC_BUTTON_SEND_AUDIO_METADATA,326,370,50,14 END IDD_DIALOG_SCREEN_SHARE DIALOGEX 0, 0, 632, 412 @@ -802,8 +803,14 @@ BEGIN IDD_DIALOG_METADATA, DIALOG BEGIN RIGHTMARGIN, 630 + VERTGUIDE, 14 + VERTGUIDE, 71 + VERTGUIDE, 326 VERTGUIDE, 466 BOTTOMMARGIN, 397 + HORZGUIDE, 331 + HORZGUIDE, 354 + HORZGUIDE, 377 END IDD_DIALOG_SCREEN_SHARE, DIALOG diff --git a/windows/APIExample/APIExample/APIExample.vcxproj b/windows/APIExample/APIExample/APIExample.vcxproj index 7c44bcbec..ae9d15a06 100644 --- a/windows/APIExample/APIExample/APIExample.vcxproj +++ b/windows/APIExample/APIExample/APIExample.vcxproj @@ -95,7 +95,7 @@ Disabled false WIN32;_WINDOWS;_DEBUG;_CRT_SECURE_NO_WARNINGS;%(PreprocessorDefinitions); - $(solutionDir)ThirdParty\libFFmpeg\include;$(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libYUV;$(ProjectDir) + $(solutionDir)ThirdParty\libFFmpeg\include;$(solutionDir)ThirdParty\Boost;$(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libYUV;$(ProjectDir) MultiThreadedDLL @@ -140,7 +140,7 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Configuration)) Disabled true _WINDOWS;_DEBUG;_CRT_SECURE_NO_WARNINGS;%(PreprocessorDefinitions) - $(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libYUV\;$(solutionDir)ThirdParty\libFFmpeg\include;$(ProjectDir) + $(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libYUV\;$(solutionDir)ThirdParty\Boost;$(solutionDir)ThirdParty\libFFmpeg\include;$(ProjectDir) MultiThreadedDLL @@ -182,7 +182,7 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)$(Configuration)) true false 
WIN32;_WINDOWS;NDEBUG;_CRT_SECURE_NO_WARNINGS;%(PreprocessorDefinitions); - $(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libFFmpeg\include;$(solutionDir)ThirdParty\libYUV;$(ProjectDir) + $(solutionDir)ThirdParty\Boost;$(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libFFmpeg\include;$(solutionDir)ThirdParty\libYUV;$(ProjectDir) MultiThreadedDLL @@ -231,7 +231,7 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Configuration)) true true _WINDOWS;NDEBUG;_CRT_SECURE_NO_WARNINGS;%(PreprocessorDefinitions) - $(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libFFmpeg\include;$(solutionDir)ThirdParty\libYUV;$(ProjectDir) + $(solutionDir)ThirdParty\Boost;$(SolutionDir)sdk\high_level_api\include;$(solutionDir)ThirdParty\libFFmpeg\include;$(solutionDir)ThirdParty\libYUV;$(ProjectDir) MultiThreadedDLL @@ -281,6 +281,7 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)\$(Configuration)) + @@ -294,7 +295,6 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)\$(Configuration)) - @@ -333,6 +333,7 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)\$(Configuration)) + @@ -346,7 +347,6 @@ if exist sample.yuv (copy sample.yuv $(SolutionDir)$(Platform)\$(Configuration)) - diff --git a/windows/APIExample/APIExample/APIExample.vcxproj.filters b/windows/APIExample/APIExample/APIExample.vcxproj.filters index 5863469bd..de1e3f9ea 100644 --- a/windows/APIExample/APIExample/APIExample.vcxproj.filters +++ b/windows/APIExample/APIExample/APIExample.vcxproj.filters @@ -22,9 +22,6 @@ {0637d784-14a1-4260-901a-caa73030f229} - - {ed782797-3b06-44a9-8894-6b9d93d0dfea} - {7682a8b6-0c3d-46bc-8519-8ec74432ff48} @@ -112,6 +109,9 @@ {f40fbcc5-c4fb-4581-a7b2-05cbcc5f737c} + + {ed782797-3b06-44a9-8894-6b9d93d0dfea} + @@ -156,9 +156,6 @@ Advanced\ScreenShare - - Advanced\VideoMetadata - Basic\LiveBroadcasting @@ -264,6 +261,9 @@ Basic\JoinChannelVideoByToken + + Advanced\Metadata + @@ -299,9 +299,6 @@ Advanced\ScreenShare - - Advanced\VideoMetadata - Basic\LiveBroadcasting @@ -404,6 +401,9 @@ Basic\JoinChannelVideoByToken + + Advanced\Metadata + diff --git a/windows/APIExample/APIExample/APIExampleDlg.cpp b/windows/APIExample/APIExample/APIExampleDlg.cpp index 39a139063..7281c0795 100755 --- a/windows/APIExample/APIExample/APIExampleDlg.cpp +++ b/windows/APIExample/APIExample/APIExampleDlg.cpp @@ -226,7 +226,7 @@ void CAPIExampleDlg::InitSceneDialog() //advanced list // m_vecAdvanced.push_back(advancedRtmpInject); m_vecAdvanced.push_back(advancedRtmpStreaming); - m_vecAdvanced.push_back(advancedVideoMetadata); + m_vecAdvanced.push_back(advancedMetadata); m_vecAdvanced.push_back(advancedMediaPlayer); m_vecAdvanced.push_back(advancedMediaRecorder); m_vecAdvanced.push_back(advancedScreenCap); @@ -520,7 +520,7 @@ void CAPIExampleDlg::CreateScene(CTreeCtrl& treeScene, CString selectedText) }else if (selectedText.Compare(advancedRtmpStreaming) == 0) { m_pRtmpStreamingDlg->InitAgora(); m_pRtmpStreamingDlg->ShowWindow(SW_SHOW); - }else if (selectedText.Compare(advancedVideoMetadata) == 0) { + }else if (selectedText.Compare(advancedMetadata) == 0) { m_pVideoSEIDlg->InitAgora(); m_pVideoSEIDlg->ShowWindow(SW_SHOW); }else if (selectedText.Compare(advancedScreenCap) == 0) { @@ -625,7 +625,7 @@ void CAPIExampleDlg::ReleaseScene(CTreeCtrl& treeScene, HTREEITEM& hSelectItem) }else if (str.Compare(advancedRtmpStreaming) == 0) { m_pRtmpStreamingDlg->UnInitAgora(); m_pRtmpStreamingDlg->ShowWindow(SW_HIDE); - }else if 
(str.Compare(advancedVideoMetadata) == 0) { + }else if (str.Compare(advancedMetadata) == 0) { m_pVideoSEIDlg->UnInitAgora(); m_pVideoSEIDlg->ShowWindow(SW_HIDE); }else if (str.Compare(advancedScreenCap) == 0){ diff --git a/windows/APIExample/APIExample/APIExampleDlg.h b/windows/APIExample/APIExample/APIExampleDlg.h index a663ce211..12c9bd6bb 100755 --- a/windows/APIExample/APIExample/APIExampleDlg.h +++ b/windows/APIExample/APIExample/APIExampleDlg.h @@ -7,7 +7,7 @@ #include "Basic/JoinChannelVideoByToken/CJoinChannelVideoByTokenDlg.h" #include "Advanced/RTMPinject/AgoraRtmpInjectionDlg.h" #include "Advanced/RTMPStream/AgoraRtmpStreaming.h" -#include "Advanced/VideoMetadata/CAgoraMetaDataDlg.h" +#include "Advanced/Metadata/CAgoraMetaDataDlg.h" #include "Advanced/ScreenShare/AgoraScreenCapture.h" #include "Advanced/CustomAudioCapture/CAgoraCaptureAudioDlg.h" #include "Advanced/CustomVideoCapture/CAgoraCaptureVideoDlg.h" diff --git a/windows/APIExample/APIExample/Advanced/AudioMixing/CAgoraAudioMixingDlg.cpp b/windows/APIExample/APIExample/Advanced/AudioMixing/CAgoraAudioMixingDlg.cpp index 8685da2d4..70fc52d84 100755 --- a/windows/APIExample/APIExample/Advanced/AudioMixing/CAgoraAudioMixingDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/AudioMixing/CAgoraAudioMixingDlg.cpp @@ -76,7 +76,7 @@ bool CAgoraAudioMixingDlg::InitAgora() //enable video in the engine. m_rtcEngine->enableVideo(); m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("enable video")); - + m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("live broadcasting")); //set client role in the engine to the CLIENT_ROLE_BROADCASTER. m_rtcEngine->setClientRole(CLIENT_ROLE_BROADCASTER); @@ -186,14 +186,14 @@ BEGIN_MESSAGE_MAP(CAgoraAudioMixingDlg, CDialogEx) ON_BN_CLICKED(IDC_BUTTON_MIXING_RESUME, &CAgoraAudioMixingDlg::OnBnClickedButtonMixingResume) ON_BN_CLICKED(IDC_BUTTON_MIXING_PAUSE, &CAgoraAudioMixingDlg::OnBnClickedButtonMixingPause) ON_BN_CLICKED(IDC_BUTTON_MIXING_STOP, &CAgoraAudioMixingDlg::OnBnClickedButtonMixingStop) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_MIXING_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderMixingVolume) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_MIXING_PLAYOUT_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderMixingPlayoutVolume) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_MIXING_PUBLISH_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderMixingPublishVolume) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_MIXING_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderMixingVolume) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_MIXING_PLAYOUT_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderMixingPlayoutVolume) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_MIXING_PUBLISH_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderMixingPublishVolume) ON_BN_CLICKED(IDC_BUTTON_EFFECT_START, &CAgoraAudioMixingDlg::OnBnClickedButtonEffectStart) ON_BN_CLICKED(IDC_BUTTON_EFFECT_RESUME, &CAgoraAudioMixingDlg::OnBnClickedButtonEffectResume) ON_BN_CLICKED(IDC_BUTTON_EFFECT_PAUSE, &CAgoraAudioMixingDlg::OnBnClickedButtonEffectPause) ON_BN_CLICKED(IDC_BUTTON_EFFECT_STOP, &CAgoraAudioMixingDlg::OnBnClickedButtonEffectStop) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_EFFECT_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderEffectVolume) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_EFFECT_VOLUME, &CAgoraAudioMixingDlg::OnNMCustomdrawSliderEffectVolume) END_MESSAGE_MAP() diff --git a/windows/APIExample/APIExample/Advanced/Beauty/CDlgBeauty.cpp b/windows/APIExample/APIExample/Advanced/Beauty/CDlgBeauty.cpp index a2e4574f2..29e395339 100755 --- 
a/windows/APIExample/APIExample/Advanced/Beauty/CDlgBeauty.cpp +++ b/windows/APIExample/APIExample/Advanced/Beauty/CDlgBeauty.cpp @@ -56,13 +56,13 @@ BEGIN_MESSAGE_MAP(CDlgBeauty, CDialogEx) ON_MESSAGE(WM_MSGID(EID_LEAVE_CHANNEL), &CDlgBeauty::OnEIDLeaveChannel) ON_MESSAGE(WM_MSGID(EID_JOINCHANNEL_SUCCESS), &CDlgBeauty::OnEIDJoinChannelSuccess) ON_BN_CLICKED(IDC_CHECK_BEAUTY_ENABLE, &CDlgBeauty::OnBnClickedCheckBeautyEnable) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_REDNESS, &CDlgBeauty::OnNMCustomdrawSliderRedness) - ON_NOTIFY(TRBN_THUMBPOSCHANGING, IDC_SLIDER_LIGHTENING, &CDlgBeauty::OnThumbposchangingSliderLightening) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_SMOOTHNESS, &CDlgBeauty::OnNMCustomdrawSliderSmoothness) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_REDNESS, &CDlgBeauty::OnNMCustomdrawSliderRedness) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_LIGHTENING, &CDlgBeauty::OnThumbposchangingSliderLightening) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_SMOOTHNESS, &CDlgBeauty::OnNMCustomdrawSliderSmoothness) ON_CBN_SELCHANGE(IDC_COMBO_BEAUTE_LIGHTENING_CONTRAST_LEVEL, &CDlgBeauty::OnSelchangeComboBeauteLighteningContrastLevel) ON_BN_CLICKED(IDC_CHECK_ENHANCE, &CDlgBeauty::OnBnClickedCheckEnhance) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_STRENGTH, &CDlgBeauty::OnCustomdrawSliderStrength) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_SKIN_PROTECT, &CDlgBeauty::OnCustomdrawSliderSkinProtect) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_STRENGTH, &CDlgBeauty::OnCustomdrawSliderStrength) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_SKIN_PROTECT, &CDlgBeauty::OnCustomdrawSliderSkinProtect) ON_BN_CLICKED(IDC_CHECK_VIDEO_DENOISE, &CDlgBeauty::OnBnClickedCheckVideoDenoise) ON_BN_CLICKED(IDC_CHECK_VIDEO_DENOISE2, &CDlgBeauty::OnBnClickedCheckVideoDenoise2) ON_BN_CLICKED(IDC_CHECK_LOWLIGHT, &CDlgBeauty::OnBnClickedCheckLowlight) diff --git a/windows/APIExample/APIExample/Advanced/BeautyAudio/CAgoraBeautyAudio.cpp b/windows/APIExample/APIExample/Advanced/BeautyAudio/CAgoraBeautyAudio.cpp index d99f3930c..b94737eb7 100644 --- a/windows/APIExample/APIExample/Advanced/BeautyAudio/CAgoraBeautyAudio.cpp +++ b/windows/APIExample/APIExample/Advanced/BeautyAudio/CAgoraBeautyAudio.cpp @@ -182,7 +182,7 @@ BEGIN_MESSAGE_MAP(CAgoraBeautyAudio, CDialogEx) ON_LBN_SELCHANGE(IDC_LIST_INFO_BROADCASTING, &CAgoraBeautyAudio::OnSelchangeListInfoBroadcasting) ON_CBN_SELCHANGE(IDC_COMBO_AUDIO_CHANGER, &CAgoraBeautyAudio::OnSelchangeComboAudioChanger) ON_CBN_SELCHANGE(IDC_COMBO_AUDIO_PERVERB_PRESET, &CAgoraBeautyAudio::OnSelchangeComboAudioPerverbPreset) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_VOICE_FORMANT, &CAgoraBeautyAudio::OnNMCustomdrawSliderVoiceFormant) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_VOICE_FORMANT, &CAgoraBeautyAudio::OnNMCustomdrawSliderVoiceFormant) ON_CBN_SELCHANGE(IDC_COMBO_AUDIO_AINS_MODE, &CAgoraBeautyAudio::OnCbnSelchangeComboAudioAinsMode) END_MESSAGE_MAP() diff --git a/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.cpp b/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.cpp index dd644e5d0..fb2e54a97 100644 --- a/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.cpp @@ -71,7 +71,7 @@ bool CAgoraFaceCaptureDlg::InitAgora() // enable face capture ret = m_rtcEngine->enableExtension("agora_video_filters_face_capture", "face_capture", true, MEDIA_SOURCE_TYPE::PRIMARY_CAMERA_SOURCE); - strInfo.Format(_T("Enable face capture cxtension ret: %d"), ret); + 
strInfo.Format(_T("Enable face capture extension ret: %d"), ret); m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); // face capture authentication @@ -85,9 +85,31 @@ bool CAgoraFaceCaptureDlg::InitAgora() strInfo.Format(_T("Auth face capture ret: %d"), ret); m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + // load speech driven extension + ret = m_rtcEngine->loadExtensionProvider("libagora_lip_sync_extension.dll"); + strInfo.Format(_T("Load speech driven extension ret: %d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + + // enable speech driven + ret = m_rtcEngine->enableExtension("agora_filters_lip_sync", "lip_sync", true, MEDIA_SOURCE_TYPE::SPEECH_DRIVEN_VIDEO_SOURCE); + strInfo.Format(_T("Enable speech driven extension ret: %d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + + // speech driven authentication + strAuth.Format(_T("{\"company_id\":\"%s\",\"license\":\"%s\",\"open_agc\":true}"), _T("agoraTest"), utf82cs(std::string(FACE_CAPTURE_LICENSE))); + ret = m_rtcEngine->setExtensionProperty("agora_filters_lip_sync", + "lip_sync", + "parameters", + cs2utf8(strAuth).c_str(), + MEDIA_SOURCE_TYPE::SPEECH_DRIVEN_VIDEO_SOURCE); + strInfo.Format(_T("Auth speech driven ret: %d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + m_videoFrameObserver.SetMsgReceiver(m_hWnd); RegisterVideoFrameObserver(TRUE, &m_videoFrameObserver); + m_faceInfoObserver.SetMsgReceiver(m_hWnd); + RegisterFaceInfoObserver(TRUE, &m_faceInfoObserver); return true; } @@ -108,6 +130,8 @@ void CAgoraFaceCaptureDlg::UnInitAgora() m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("disableVideo")); RegisterVideoFrameObserver(FALSE); m_videoFrameObserver.SetMsgReceiver(nullptr); + RegisterFaceInfoObserver(FALSE); + m_faceInfoObserver.SetMsgReceiver(nullptr); //release engine. if (m_initialize) { m_rtcEngine->release(true); @@ -170,6 +194,7 @@ BEGIN_MESSAGE_MAP(CAgoraFaceCaptureDlg, CDialogEx) ON_BN_CLICKED(IDC_BUTTON_JOINCHANNEL, &CAgoraFaceCaptureDlg::OnBnClickedButtonJoinchannel) ON_MESSAGE(WM_MSGID(EID_EXTENSION_EVENT), &CAgoraFaceCaptureDlg::OnEIDExtensionEvent) ON_MESSAGE(WM_MSGID(EID_ON_CAPTURE_VIDEOFRAME), &CAgoraFaceCaptureDlg::OnEIDonCaptureVideoFrame) + ON_MESSAGE(WM_MSGID(EID_ON_FACE_INFO), &CAgoraFaceCaptureDlg::OnEIDonFaceInfo) ON_LBN_SELCHANGE(IDC_LIST_INFO_BROADCASTING, &CAgoraFaceCaptureDlg::OnSelchangeListInfoBroadcasting) END_MESSAGE_MAP() @@ -225,6 +250,25 @@ BOOL CAgoraFaceCaptureDlg::RegisterVideoFrameObserver(BOOL bEnable, agora::media return nRet == 0 ? TRUE : FALSE; } +BOOL CAgoraFaceCaptureDlg::RegisterFaceInfoObserver(BOOL bEnable, agora::media::IFaceInfoObserver* faceInfoObserver) +{ + agora::util::AutoPtr mediaEngine; + //query interface agora::AGORA_IID_MEDIA_ENGINE in the engine. + mediaEngine.queryInterface(m_rtcEngine, AGORA_IID_MEDIA_ENGINE); + int nRet = 0; + if (mediaEngine.get() == NULL) + return FALSE; + if (bEnable) { + //register agora video frame observer. + nRet = mediaEngine->registerFaceInfoObserver(faceInfoObserver); + } + else { + //unregister agora video frame observer. + nRet = mediaEngine->registerFaceInfoObserver(nullptr); + } + return nRet == 0 ? TRUE : FALSE; +} + //click button handler to join channel or leave channel. 
void CAgoraFaceCaptureDlg::OnBnClickedButtonJoinchannel() { @@ -370,6 +414,17 @@ LRESULT CAgoraFaceCaptureDlg::OnEIDExtensionEvent(WPARAM wParam, LPARAM lParam) strInfo.Format(_T("Face caputure authentication unset.")); } } + else if (strcmp(event->provider, "agora_filters_lip_sync") == 0 + && strcmp(event->extension, "lip_sync") == 0 + && strcmp(event->key, "status_code") == 0) { + if (strcmp(event->value, "0") == 0) { + // authentication successful. + strInfo.Format(_T("Speech driven authentication successful.")); + } + else { + strInfo.Format(_T("Speech driven authentication failed. code=%s"), event->value); + } + } m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); delete[] event->provider; @@ -385,11 +440,21 @@ LRESULT CAgoraFaceCaptureDlg::OnEIDonCaptureVideoFrame(WPARAM wParam, LPARAM lPa { char* metedata = (char*)wParam; - m_edtMetadataInfo.SetWindowTextW(utf82cs(std::string(metedata))); + // m_edtMetadataInfo.SetWindowTextW(utf82cs(std::string(metedata))); + delete[] metedata; return 0; } +LRESULT CAgoraFaceCaptureDlg::OnEIDonFaceInfo(WPARAM wParam, LPARAM lParam) +{ + char* faceInfo = (char*)wParam; + + m_edtMetadataInfo.SetWindowTextW(utf82cs(std::string(faceInfo))); + delete[] faceInfo; + return 0; +} + /* note: Join the channel callback.This callback method indicates that the client @@ -537,6 +602,19 @@ bool FaceCaptureVideoFrameObserver::onCaptureVideoFrame(agora::rtc::VIDEO_SOURCE return true; } +bool FaceCaptureFaceInfoObserver::onFaceInfo(const char* outFaceInfo) { + if (m_hMsgHanlder && outFaceInfo) { + + int len = strlen(outFaceInfo); + char* out = new char[len + 1]; + out[len] = 0; + strcpy_s(out, len + 1, outFaceInfo); + ::PostMessage(m_hMsgHanlder, WM_MSGID(EID_ON_FACE_INFO), (WPARAM)out, 0); + + } + return true; +} + BOOL CAgoraFaceCaptureDlg::PreTranslateMessage(MSG* pMsg) { diff --git a/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.h b/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.h index ce6a2e5f8..a76d13666 100644 --- a/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.h +++ b/windows/APIExample/APIExample/Advanced/FaceCapture/CAgoraFaceCaptureDlg.h @@ -2,7 +2,7 @@ #include "AGVideoWnd.h" -#define FACE_CAPTURE_LICENSE "" +#define FACE_CAPTURE_LICENSE "0qIR1q9+df9pr6ha8ESiBoj51vKDGOvAILh8tO5AyGaxaUteIAzkAQlIitWIGypt1y8sDzZy/wYRz/q8FxKDwkev6PvON4z7sHTpQMPXFTbJgoGQsBvbY4zRWC0o1raLlvnA+Rui15ySFTQCgsM8QnsyF/gGPNEy639N8OaUbj0=" // Video processing Frame Observer class FaceCaptureVideoFrameObserver : @@ -58,6 +58,28 @@ class FaceCaptureVideoFrameObserver : HWND m_hMsgHanlder; }; +class FaceCaptureFaceInfoObserver : + public agora::media::IFaceInfoObserver +{ + +public: + + // set the message notify window handler + void SetMsgReceiver(HWND hWnd) { m_hMsgHanlder = hWnd; } + + /** + * Occurs when the face info is received. + * @param outFaceInfo The output face info. + * @return + * - true: The face info is valid. + * - false: The face info is invalid. + */ + virtual bool onFaceInfo(const char* outFaceInfo) override; + +private: + HWND m_hMsgHanlder; +}; + class FaceCaptureEventHandler : public IRtcEngineEventHandler @@ -165,7 +187,7 @@ class CAgoraFaceCaptureDlg : public CDialogEx void ResumeStatus(); //register or unregister agora video Frame Observer. 
BOOL RegisterVideoFrameObserver(BOOL bEnable, agora::media::IVideoFrameObserver * videoFrameObserver = NULL); - + BOOL RegisterFaceInfoObserver(BOOL bEnable, agora::media::IFaceInfoObserver* faceInfoObserver = NULL); private: bool m_joinChannel = false; bool m_initialize = false; @@ -173,6 +195,7 @@ class CAgoraFaceCaptureDlg : public CDialogEx CAGVideoWnd m_localVideoWnd; FaceCaptureEventHandler m_eventHandler; FaceCaptureVideoFrameObserver m_videoFrameObserver; + FaceCaptureFaceInfoObserver m_faceInfoObserver; protected: virtual void DoDataExchange(CDataExchange* pDX); LRESULT OnEIDJoinChannelSuccess(WPARAM wParam, LPARAM lParam); @@ -182,6 +205,7 @@ class CAgoraFaceCaptureDlg : public CDialogEx LRESULT OnEIDRemoteVideoStateChanged(WPARAM wParam, LPARAM lParam); LRESULT OnEIDExtensionEvent(WPARAM wParam, LPARAM lParam); LRESULT OnEIDonCaptureVideoFrame(WPARAM wParam, LPARAM lParam); + LRESULT OnEIDonFaceInfo(WPARAM wParam, LPARAM lParam); DECLARE_MESSAGE_MAP() public: CStatic m_staVideoArea; diff --git a/windows/APIExample/APIExample/Advanced/LocalVideoTranscoding/CLocalVideoTranscodingDlg.cpp b/windows/APIExample/APIExample/Advanced/LocalVideoTranscoding/CLocalVideoTranscodingDlg.cpp index c942eddfd..068aac922 100755 --- a/windows/APIExample/APIExample/Advanced/LocalVideoTranscoding/CLocalVideoTranscodingDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/LocalVideoTranscoding/CLocalVideoTranscodingDlg.cpp @@ -92,19 +92,22 @@ void CLocalVideoTranscodingDlg::OnBnClickedButtonJoinchannel() config.format.height = 360; config.format.fps = 15; //get selected camera device id + char* buffer = new char[512] {0}; for (UINT i = 0; i < m_vecCameraInfos.size(); i++) { LOCALVIDEOTRANSCODING_CAMERAINFO info = m_vecCameraInfos[i]; CString strName; m_cmbCamera.GetWindowText(strName); if (info.deviceName.compare(cs2utf8(strName)) == 0) { - strcpy_s(config.deviceId, 512, info.deviceId.c_str()); + strcpy_s(buffer, 512, info.deviceId.c_str()); + config.deviceId = buffer; break; } } //start primary camera capture int ret = m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_PRIMARY, config); + delete[] buffer; m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("start primary camera capture")); //start Screen capture diff --git a/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.cpp b/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.cpp index 6bbf0d102..b59f1b842 100755 --- a/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.cpp @@ -239,11 +239,33 @@ void CAgoraMediaEncryptDlg::OnBnClickedButtonJoinchannel() m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); } + +void decodeBase64(const std::string& encoded, std::vector<unsigned char>& out) +{ + auto unpadded = encoded; + + const auto num_padded = std::count(begin(encoded), end(encoded), '='); + std::replace(begin(unpadded), end(unpadded), '=', 'A'); // A_64 == \0 + + std::string decoded{ detail::BinaryFromBase64{begin(unpadded)}, + detail::BinaryFromBase64{begin(unpadded) + unpadded.length()} }; + + decoded.erase(end(decoded) - num_padded, end(decoded)); + std::copy(begin(decoded), end(decoded), out.begin()); +} + //suggest generate secret using openssl on server -std::string getEncryptionSaltFromServer() +std::vector<unsigned char> getEncryptionSaltFromServer() { - return "EncryptionKdfSaltInBase64Strings"; + std::string kdfSaltBase64 = "NiIeJ08AbtcQVjvV+oOEvF/4Dz5dy1CIwa805C8J2w0="; + std::vector<unsigned char> kdfSalt; + for (int i = 0; i < 32; ++i) { + kdfSalt.push_back(0); + } + decodeBase64(kdfSaltBase64, kdfSalt); + return kdfSalt; } + //set media encrypt button click handler void CAgoraMediaEncryptDlg::OnBnClickedButtonSetMediaEncrypt() { @@ -259,7 +281,7 @@ void CAgoraMediaEncryptDlg::OnBnClickedButtonSetMediaEncrypt() EncryptionConfig config; config.encryptionMode = m_mapEncryptMode[encryption.c_str()]; config.encryptionKey = secret.c_str(); - memcpy(config.encryptionKdfSalt, getEncryptionSaltFromServer().c_str(), 32); + memcpy(config.encryptionKdfSalt, getEncryptionSaltFromServer().data(), 32); //set encrypt mode m_rtcEngine->enableEncryption(true, config); CString strInfo; diff --git a/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.h b/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.h index d51fc0c89..0762af737 100755 --- a/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.h +++ b/windows/APIExample/APIExample/Advanced/MediaEncrypt/CAgoraMediaEncryptDlg.h @@ -1,6 +1,23 @@ #pragma once #include "AGVideoWnd.h" #include <map> +#include <vector> +#include <algorithm> +#include <boost/archive/iterators/base64_from_binary.hpp> +#include <boost/archive/iterators/binary_from_base64.hpp> +#include <boost/archive/iterators/transform_width.hpp> + + +namespace detail +{ + using Base64FromBinary = boost::archive::iterators::base64_from_binary< + boost::archive::iterators::transform_width<const char*, 6, 8>>; + + using BinaryFromBase64 = boost::archive::iterators::transform_width< + boost::archive::iterators::binary_from_base64<std::string::const_iterator>, + 8, 6>; +} + class CAgoraMediaEncryptHandler : public IRtcEngineEventHandler diff --git a/windows/APIExample/APIExample/Advanced/VideoMetadata/CAgoraMetaDataDlg.cpp b/windows/APIExample/APIExample/Advanced/Metadata/CAgoraMetaDataDlg.cpp old mode 100755 new mode 100644 similarity index 86% rename from windows/APIExample/APIExample/Advanced/VideoMetadata/CAgoraMetaDataDlg.cpp rename to windows/APIExample/APIExample/Advanced/Metadata/CAgoraMetaDataDlg.cpp index a0b317209..4807f2ac4 --- a/windows/APIExample/APIExample/Advanced/VideoMetadata/CAgoraMetaDataDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/Metadata/CAgoraMetaDataDlg.cpp @@ -38,9 +38,9 @@ bool CAgoraMetaDataObserver::onReadyToSendMetadata(Metadata &metadata, VIDEO_SOU std::lock_guard<std::mutex> lockSendData(g_mtxData); if (m_sendSEI.length() > 0) { memcpy_s(metadata.buffer, m_sendSEI.length(), m_sendSEI.c_str(), m_sendSEI.length()); - } metadata.size = m_sendSEI.length(); + m_sendSEI = ""; return true; } /* @@ -168,6 +168,15 @@ void CAgoraMetaDataEventHanlder::onRemoteVideoStateChanged(uid_t uid, REMOTE_VID } } +void CAgoraMetaDataEventHanlder::onAudioMetadataReceived(uid_t uid, const char* metadata, size_t length) +{ + if (m_hMsgHanlder && length > 0) { + char* buffer = new char[length + 1] {0}; + memcpy_s(buffer, length, metadata, length); + ::PostMessage(m_hMsgHanlder, WM_MSGID(EID_ON_RECV_AUDIO_METADATA), (WPARAM)buffer, uid); + } +} + // CAgoraMetaDataDlg dialog IMPLEMENT_DYNAMIC(CAgoraMetaDataDlg, CDialogEx) @@ -187,15 +196,15 @@ void CAgoraMetaDataDlg::DoDataExchange(CDataExchange* pDX) CDialogEx::DoDataExchange(pDX); DDX_Control(pDX, IDC_STATIC_CHANNELNAME, m_staChannelName); DDX_Control(pDX, IDC_BUTTON_JOINCHANNEL, m_btnJoinChannel); - DDX_Control(pDX, IDC_STATIC_SENDSEI, m_staSendSEI); - DDX_Control(pDX, IDC_EDIT_SEI, m_edtSendSEI); - DDX_Control(pDX, IDC_EDIT_RECV, m_edtRecvSEI); - DDX_Control(pDX, IDC_STATIC_METADATA_INFO, m_staMetaData); + DDX_Control(pDX, IDC_STATIC_VIDEO_METADATA, m_staVideoMetadata); + DDX_Control(pDX, IDC_EDIT_VIDEO_METADATA, m_edtVideoMetadata); + DDX_Control(pDX, IDC_BUTTON_SEND_VIDEO_METADATA, m_btnSendVideoMetadata); +
DDX_Control(pDX, IDC_STATIC_AUDIO_METADATA, m_staAudioMetadata); + DDX_Control(pDX, IDC_EDIT_AUDIO_METADATA, m_edtAudioMetadata); + DDX_Control(pDX, IDC_BUTTON_SEND_AUDIO_METADATA, m_btnSendAudioMetadata); DDX_Control(pDX, IDC_LIST_INFO_BROADCASTING, m_lstInfo); DDX_Control(pDX, IDC_STATIC_VIDEO, m_staVideoArea); DDX_Control(pDX, IDC_EDIT_CHANNELNAME, m_edtChannelName); - DDX_Control(pDX, IDC_BUTTON_SEND, m_btnSendSEI); - DDX_Control(pDX, IDC_BUTTON_CLEAR, m_btnClear); } @@ -206,10 +215,11 @@ BEGIN_MESSAGE_MAP(CAgoraMetaDataDlg, CDialogEx) ON_MESSAGE(WM_MSGID(EID_USER_JOINED), &CAgoraMetaDataDlg::OnEIDUserJoined) ON_MESSAGE(WM_MSGID(EID_USER_OFFLINE), &CAgoraMetaDataDlg::OnEIDUserOffline) ON_MESSAGE(WM_MSGID(EID_REMOTE_VIDEO_STATE_CHANGED), &CAgoraMetaDataDlg::OnEIDRemoteVideoStateChanged) + ON_MESSAGE(WM_MSGID(EID_ON_RECV_AUDIO_METADATA), &CAgoraMetaDataDlg::OnEIDAudioMetadataReceived) ON_MESSAGE(WM_MSGID(RECV_METADATA_MSG), &CAgoraMetaDataDlg::OnEIDMetadataReceived) ON_WM_SHOWWINDOW() - ON_BN_CLICKED(IDC_BUTTON_SEND, &CAgoraMetaDataDlg::OnBnClickedButtonSend) - ON_BN_CLICKED(IDC_BUTTON_CLEAR, &CAgoraMetaDataDlg::OnBnClickedButtonClear) + ON_BN_CLICKED(IDC_BUTTON_SEND_VIDEO_METADATA, &CAgoraMetaDataDlg::OnBnClickedButtonSendVideoMetadata) + ON_BN_CLICKED(IDC_BUTTON_SEND_AUDIO_METADATA, &CAgoraMetaDataDlg::OnBnClickedButtonSendAudioMetadata) END_MESSAGE_MAP() @@ -238,6 +248,7 @@ void CAgoraMetaDataDlg::OnBnClickedButtonJoinchannel() strInfo.Format(_T("join channel %s, use ChannelMediaOptions"), getCurrentTime()); m_btnJoinChannel.EnableWindow(FALSE); } + } else { //leave channel in the engine. @@ -273,10 +284,10 @@ BOOL CAgoraMetaDataDlg::OnInitDialog() //set control text from config. void CAgoraMetaDataDlg::InitCtrlText() { - m_btnClear.SetWindowText(metadataCtrlBtnClear); - m_staMetaData.SetWindowText(videoSEIInformation); - m_staSendSEI.SetWindowText(metadataCtrlSendSEI); - m_btnSendSEI.SetWindowText(metadataCtrlBtnSend); + m_staVideoMetadata.SetWindowText(advancedMetadataVideo); + m_btnSendVideoMetadata.SetWindowText(advancedMetadataSend); + m_staAudioMetadata.SetWindowText(advancedMetadataAudio); + m_btnSendAudioMetadata.SetWindowText(advancedMetadataSend); m_staChannelName.SetWindowText(commonCtrlChannel); m_btnJoinChannel.SetWindowText(commonCtrlJoinChannel); } @@ -300,6 +311,7 @@ bool CAgoraMetaDataDlg::InitAgora() context.channelProfile = CHANNEL_PROFILE_LIVE_BROADCASTING; //initialize the Agora RTC engine context. int ret = m_rtcEngine->initialize(context); + m_rtcEngine->setParameters("{\"rtc.use_audio4\":true}"); if (ret != 0) { m_initialize = false; CString strInfo; @@ -314,6 +326,8 @@ bool CAgoraMetaDataDlg::InitAgora() m_rtcEngine->enableVideo(); m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("enable video")); + m_rtcEngine->enableAudio(); + m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("live broadcasting")); //set client role in the engine to the CLIENT_ROLE_BROADCASTER. 
m_rtcEngine->setClientRole(CLIENT_ROLE_BROADCASTER); @@ -458,20 +472,35 @@ LRESULT CAgoraMetaDataDlg::OnEIDRemoteVideoStateChanged(WPARAM wParam, LPARAM lP LRESULT CAgoraMetaDataDlg::OnEIDMetadataReceived(WPARAM wParam, LPARAM lParam) { IMetadataObserver::Metadata* metaData = (IMetadataObserver::Metadata*)wParam; - CString strInfo; - strInfo.Format(_T("onMetadataReceived:uid:%u, ts=%d, size:%d."), metaData->uid, metaData->timeStampMs, metaData->size); + CString strInfo; + strInfo.Format(_T("onMetadataReceived:uid:%u, ts=%d, size:%d."), metaData->uid, metaData->timeStampMs, metaData->size); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + + if (metaData->size > 0) { + strInfo.Format(_T("Info: %s"), utf82cs((char *)metaData->buffer)); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + } - if (metaData->size > 0) { - CString str; - str.Format(_T("Info: %s"), utf82cs((char *)metaData->buffer)); - strInfo += str; - } - m_edtRecvSEI.SetWindowText(strInfo); delete metaData->buffer; delete metaData; return 0; } +LRESULT CAgoraMetaDataDlg::OnEIDAudioMetadataReceived(WPARAM wParam, LPARAM lParam) +{ + uid_t uid = lParam; + char* metadata = (char*)wParam; + CString strInfo; + strInfo.Format(_T("onAudioMetadataReceived:uid:%u, size:%d."), uid, (int)strlen(metadata)); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + + strInfo.Format(_T("Info: %s"), utf82cs(metadata)); + m_lstInfo.InsertString(m_lstInfo.GetCount(), strInfo); + + delete[] metadata; + return 0; +} + //render local video from SDK local capture. void CAgoraMetaDataDlg::RenderLocalVideo() { @@ -494,8 +523,8 @@ void CAgoraMetaDataDlg::ResumeStatus() m_lstInfo.ResetContent(); m_metaDataObserver.SetSendSEI(""); m_edtChannelName.SetWindowText(_T("")); - m_edtSendSEI.SetWindowText(_T("")); - m_edtRecvSEI.SetWindowText(_T("")); + m_edtVideoMetadata.SetWindowText(_T("")); + m_edtAudioMetadata.SetWindowText(_T("")); m_joinChannel = false; m_initialize = false; m_remoteJoined = false; @@ -514,10 +543,10 @@ void CAgoraMetaDataDlg::OnShowWindow(BOOL bShow, UINT nStatus) } //send video metadata button handler. -void CAgoraMetaDataDlg::OnBnClickedButtonSend() +void CAgoraMetaDataDlg::OnBnClickedButtonSendVideoMetadata() { CString strSend; - m_edtSendSEI.GetWindowText(strSend); + m_edtVideoMetadata.GetWindowText(strSend); if (strSend.IsEmpty()) return; std::string utf8msg = cs2utf8(strSend); @@ -526,11 +555,14 @@ } //send audio metadata button handler. -void CAgoraMetaDataDlg::OnBnClickedButtonClear() +void CAgoraMetaDataDlg::OnBnClickedButtonSendAudioMetadata() { - m_edtSendSEI.SetWindowText(_T("")); - //set send message string. - m_metaDataObserver.SetSendSEI(""); + CString strSend; + m_edtAudioMetadata.GetWindowText(strSend); + if (strSend.IsEmpty()) + return; + std::string utf8msg = cs2utf8(strSend); + m_rtcEngine->sendAudioMetadata(utf8msg.c_str(), utf8msg.length()); } diff --git a/windows/APIExample/APIExample/Advanced/VideoMetadata/CAgoraMetaDataDlg.h b/windows/APIExample/APIExample/Advanced/Metadata/CAgoraMetaDataDlg.h similarity index 93% rename from windows/APIExample/APIExample/Advanced/VideoMetadata/CAgoraMetaDataDlg.h rename to windows/APIExample/APIExample/Advanced/Metadata/CAgoraMetaDataDlg.h index f77b9059c..7da4da8f0 100644 --- a/windows/APIExample/APIExample/Advanced/VideoMetadata/CAgoraMetaDataDlg.h +++ b/windows/APIExample/APIExample/Advanced/Metadata/CAgoraMetaDataDlg.h @@ -114,6 +114,10 @@ class CAgoraMetaDataEventHanlder : public IRtcEngineEventHandler SDK triggers this callback.
*/ virtual void onRemoteVideoStateChanged(uid_t uid, REMOTE_VIDEO_STATE state, REMOTE_VIDEO_STATE_REASON reason, int elapsed) override; + + + virtual void onAudioMetadataReceived(uid_t uid, const char* metadata, size_t length) override; + private: HWND m_hMsgHanlder; }; @@ -147,6 +151,7 @@ class CAgoraMetaDataDlg : public CDialogEx afx_msg LRESULT OnEIDUserOffline(WPARAM wParam, LPARAM lParam); afx_msg LRESULT OnEIDRemoteVideoStateChanged(WPARAM wParam, LPARAM lParam); afx_msg LRESULT OnEIDMetadataReceived(WPARAM wParam, LPARAM lParam); + afx_msg LRESULT OnEIDAudioMetadataReceived(WPARAM wParam, LPARAM lParam); protected: virtual void DoDataExchange(CDataExchange* pDX); // DDX/DDV support @@ -154,10 +159,12 @@ class CAgoraMetaDataDlg : public CDialogEx public: CStatic m_staChannelName; CButton m_btnJoinChannel; - CStatic m_staSendSEI; - CEdit m_edtSendSEI; - CEdit m_edtRecvSEI; - CStatic m_staMetaData; + CStatic m_staVideoMetadata; + CEdit m_edtVideoMetadata; + CButton m_btnSendVideoMetadata; + CStatic m_staAudioMetadata; + CEdit m_edtAudioMetadata; + CButton m_btnSendAudioMetadata; CListBox m_lstInfo; afx_msg void OnBnClickedButtonJoinchannel(); virtual BOOL OnInitDialog(); @@ -177,9 +184,7 @@ class CAgoraMetaDataDlg : public CDialogEx CStatic m_staVideoArea; CEdit m_edtChannelName; afx_msg void OnShowWindow(BOOL bShow, UINT nStatus); - CButton m_btnSendSEI; - afx_msg void OnBnClickedButtonSend(); - afx_msg void OnBnClickedButtonClear(); + afx_msg void OnBnClickedButtonSendVideoMetadata(); + afx_msg void OnBnClickedButtonSendAudioMetadata(); virtual BOOL PreTranslateMessage(MSG* pMsg); - CButton m_btnClear; }; diff --git a/windows/APIExample/APIExample/Advanced/MultiVideoSource/CAgoraMutilVideoSourceDlg.cpp b/windows/APIExample/APIExample/Advanced/MultiVideoSource/CAgoraMutilVideoSourceDlg.cpp index 1cd31a2de..64856b9e8 100755 --- a/windows/APIExample/APIExample/Advanced/MultiVideoSource/CAgoraMutilVideoSourceDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/MultiVideoSource/CAgoraMutilVideoSourceDlg.cpp @@ -83,6 +83,7 @@ BOOL CAgoraMutilVideoSourceDlg::OnInitDialog() } m_maxVideoCount = 6; ShowVideoWnds(); + ResumeStatus(); return TRUE; // return TRUE unless you set the focus to a control } @@ -381,7 +382,13 @@ void CAgoraMutilVideoSourceDlg::UnInitAgora() void CAgoraMutilVideoSourceDlg::OnShowWindow(BOOL bShow, UINT nStatus) { CDialogEx::OnShowWindow(bShow, nStatus); - ResumeStatus(); + if (bShow)//bShow is true, show window + { + InitCtrlText(); + } + else { + ResumeStatus(); + } } LRESULT CAgoraMutilVideoSourceDlg::OnEIDJoinChannelSuccess(WPARAM wParam, LPARAM lParam) @@ -676,17 +683,22 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera1() //get selected camera device id CString strName; m_cmbCameras.GetWindowText(strName); + char* buffer = new char[512] {0}; for (UINT i = 0; i < m_vecCameraInfos.size(); i++) { MULTIVIDEOSOURCE_CAMERAINFO info = m_vecCameraInfos[i]; if (info.deviceName.compare(cs2utf8(strName)) == 0) { - strcpy_s(config.deviceId, 512, info.deviceId.c_str()); + strcpy_s(buffer, 512, info.deviceId.c_str()); + config.deviceId = buffer; break; } } //start primary camera capture - m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_PRIMARY, config); - m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("start primary camera capture")); + CString infoStr; + int ret = m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_PRIMARY, config); + delete[] buffer; + infoStr.Format(_T("start primary camera capture.
ret=%d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), infoStr); VideoCanvas canvas; canvas.uid = 0; canvas.sourceType = VIDEO_SOURCE_CAMERA_PRIMARY; @@ -751,19 +763,24 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera2() config2.format.fps = 15; //set camera2 deviceId CString strName; + char* buffer = new char[512] {0}; m_cmbCamera2.GetWindowText(strName); for (UINT i = 0; i < m_vecCameraInfos.size(); i++) { MULTIVIDEOSOURCE_CAMERAINFO info = m_vecCameraInfos[i]; if (info.deviceName.compare(cs2utf8(strName)) == 0) { - strcpy_s(config2.deviceId, 512, info.deviceId.c_str()); + strcpy_s(buffer, 512, info.deviceId.c_str()); + config2.deviceId = buffer; break; } } //start secondary camera capture - m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_SECONDARY, config2); - m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("start secondary camera capture")); + CString infoStr; + int ret = m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_SECONDARY, config2); + delete[] buffer; + infoStr.Format(_T("start secondary camera capture. ret=%d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), infoStr); m_btnCapture2.SetWindowText(MultiVideoSourceStopCapture); VideoCanvas canvas; canvas.uid = connection2.localUid; @@ -788,7 +805,7 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera2() options2.clientRoleType = CLIENT_ROLE_BROADCASTER; // joinChannelEx secondary camera capture(broadcaster) connection2.channelId = szChannelId.data(); - int ret = m_rtcEngine->joinChannelEx(APP_TOKEN, connection2, options2, &m_camera2EventHandler); + ret = m_rtcEngine->joinChannelEx(APP_TOKEN, connection2, options2, &m_camera2EventHandler); CString str; str.Format(_T("joinChannelEx: %d"), ret); m_lstInfo.InsertString(m_lstInfo.GetCount(), str); @@ -856,20 +873,25 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera3() config.format.fps = 15; //set camera2 deviceId CString strName; + char* buffer = new char[512] {0}; m_cmbCamera3.GetWindowText(strName); for (UINT i = 0; i < m_vecCameraInfos.size(); i++) { MULTIVIDEOSOURCE_CAMERAINFO info = m_vecCameraInfos[i]; if (info.deviceName.compare(cs2utf8(strName)) == 0) { - strcpy_s(config.deviceId, 512, info.deviceId.c_str()); + strcpy_s(buffer, 512, info.deviceId.c_str()); + config.deviceId = buffer; break; } } //start secondary camera capture - m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_THIRD, config); - m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("start third camera capture")); + CString infoStr; + int ret = m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_THIRD, config); + delete[] buffer; + infoStr.Format(_T("start third camera capture. 
ret=%d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), infoStr); m_btnCapture3.SetWindowText(MultiVideoSourceStopCapture); VideoCanvas canvas; canvas.uid = connection3.localUid; @@ -895,7 +917,7 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera3() options3.clientRoleType = CLIENT_ROLE_BROADCASTER; // joinChannelEx secondary camera capture(broadcaster) connection3.channelId = szChannelId.data(); - int ret = m_rtcEngine->joinChannelEx(APP_TOKEN, connection3, options3, &m_camera3EventHandler); + ret = m_rtcEngine->joinChannelEx(APP_TOKEN, connection3, options3, &m_camera3EventHandler); CString str; str.Format(_T("joinChannelEx: %d"), ret); m_lstInfo.InsertString(m_lstInfo.GetCount(), str); @@ -964,11 +986,13 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera4() //set camera2 deviceId CString strName; m_cmbCamera4.GetWindowText(strName); + char* buffer = new char[512] {0}; for (UINT i = 0; i < m_vecCameraInfos.size(); i++) { MULTIVIDEOSOURCE_CAMERAINFO info = m_vecCameraInfos[i]; if (info.deviceName.compare(cs2utf8(strName)) == 0) { - strcpy_s(config.deviceId, 512, info.deviceId.c_str()); + strcpy_s(buffer, 512, info.deviceId.c_str()); + config.deviceId = buffer; break; } } @@ -977,8 +1001,11 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera4() m_camera4EventHandler.SetMsgReceiver(m_hWnd); //start secondary camera capture - m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_FOURTH, config); - m_lstInfo.InsertString(m_lstInfo.GetCount(), _T("start fourth camera capture")); + CString infoStr; + int ret = m_rtcEngine->startCameraCapture(VIDEO_SOURCE_CAMERA_FOURTH, config); + delete[] buffer; + infoStr.Format(_T("start fourth camera capture. ret=%d"), ret); + m_lstInfo.InsertString(m_lstInfo.GetCount(), infoStr); m_btnCapture4.SetWindowText(MultiVideoSourceStopCapture); VideoCanvas canvas; canvas.uid = connection4.localUid; @@ -1001,7 +1028,7 @@ void CAgoraMutilVideoSourceDlg::OnBnClickedButtonCamera4() options4.clientRoleType = CLIENT_ROLE_BROADCASTER; // joinChannelEx secondary camera capture(broadcaster) connection4.channelId = szChannelId.data(); - int ret = m_rtcEngine->joinChannelEx(APP_TOKEN, connection4, options4, &m_camera4EventHandler); + ret = m_rtcEngine->joinChannelEx(APP_TOKEN, connection4, options4, &m_camera4EventHandler); CString str; str.Format(_T("joinChannelEx: %d"), ret); m_lstInfo.InsertString(m_lstInfo.GetCount(), str); diff --git a/windows/APIExample/APIExample/Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.cpp b/windows/APIExample/APIExample/Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.cpp index ea48592e8..45928632c 100755 --- a/windows/APIExample/APIExample/Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.cpp +++ b/windows/APIExample/APIExample/Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.cpp @@ -70,6 +70,7 @@ BOOL MultiVideoSourceTracks::OnInitDialog() int version = avformat_version(); avformat_network_init(); + ResumeStatus(); return TRUE; // return TRUE unless you set the focus to a control } @@ -253,10 +254,12 @@ void MultiVideoSourceTracks::ResumeStatus() { InitCtrlText(); - + m_initialize = false; m_joinChannel = false; m_btnJoinChannel.EnableWindow(TRUE); + m_edtChannelName.SetWindowTextW(_T("")); + m_lstInfo.ResetContent(); // Reset windows for (int i = 0; i < VIDEO_WINDOWS_SIZE; i++) @@ -334,7 +337,13 @@ void MultiVideoSourceTracks::UnInitAgora() void MultiVideoSourceTracks::OnShowWindow(BOOL bShow, UINT nStatus) { CDialogEx::OnShowWindow(bShow, nStatus); - ResumeStatus(); + if (bShow)//bShwo is 
true ,show window + { + InitCtrlText(); + } + else { + ResumeStatus(); + } } diff --git a/windows/APIExample/APIExample/Advanced/OriginalVideo/CAgoraOriginalVideoDlg.h b/windows/APIExample/APIExample/Advanced/OriginalVideo/CAgoraOriginalVideoDlg.h index 2b21b424a..ef100b254 100755 --- a/windows/APIExample/APIExample/Advanced/OriginalVideo/CAgoraOriginalVideoDlg.h +++ b/windows/APIExample/APIExample/Advanced/OriginalVideo/CAgoraOriginalVideoDlg.h @@ -49,7 +49,13 @@ class CGrayVideoProcFrameObserver : virtual bool onMediaPlayerVideoFrame(VideoFrame& videoFrame, int mediaPlayerId) override { return true; } virtual bool onPreEncodeVideoFrame(agora::rtc::VIDEO_SOURCE_TYPE type, VideoFrame& videoFrame) { return true; } + virtual VIDEO_FRAME_PROCESS_MODE getVideoFrameProcessMode() { + return VIDEO_FRAME_PROCESS_MODE::PROCESS_MODE_READ_WRITE; + } + virtual uint32_t getObservedFramePosition() { + return agora::media::base::POSITION_POST_CAPTURER; + } }; @@ -104,6 +110,13 @@ class CAverageFilterVideoProcFrameObserver : virtual bool onMediaPlayerVideoFrame(VideoFrame& videoFrame, int mediaPlayerId) override { return true; } virtual bool onPreEncodeVideoFrame(agora::rtc::VIDEO_SOURCE_TYPE type, VideoFrame& videoFrame) { return true; } + virtual VIDEO_FRAME_PROCESS_MODE getVideoFrameProcessMode() { + return VIDEO_FRAME_PROCESS_MODE::PROCESS_MODE_READ_WRITE; + } + + virtual uint32_t getObservedFramePosition() { + return agora::media::base::POSITION_POST_CAPTURER; + } }; diff --git a/windows/APIExample/APIExample/Advanced/ScreenShare/AgoraScreenCapture.cpp b/windows/APIExample/APIExample/Advanced/ScreenShare/AgoraScreenCapture.cpp index 4997e47c2..93d59c6ca 100755 --- a/windows/APIExample/APIExample/Advanced/ScreenShare/AgoraScreenCapture.cpp +++ b/windows/APIExample/APIExample/Advanced/ScreenShare/AgoraScreenCapture.cpp @@ -258,8 +258,8 @@ BEGIN_MESSAGE_MAP(CAgoraScreenCapture, CDialogEx) ON_CBN_DROPDOWN(IDC_COMBO_SCREEN_CAPTURE, &CAgoraScreenCapture::OnCbnDropDownComboScreenCapture) ON_CBN_SELCHANGE(IDC_COMBO_SCREEN_CAPTURE_SCENARIO, &CAgoraScreenCapture::OnCbnSelchangeComboScreenCaptureScenario) ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_FPS, &CAgoraScreenCapture::OnNMCustomdrawSliderFps) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_BITRATE, &CAgoraScreenCapture::OnNMCustomdrawSliderBitrate) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_SCALE, &CAgoraScreenCapture::OnNMCustomdrawSliderScale) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_BITRATE, &CAgoraScreenCapture::OnNMCustomdrawSliderBitrate) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_SCALE, &CAgoraScreenCapture::OnNMCustomdrawSliderScale) ON_CBN_DROPDOWN(IDC_COMBO_HWND, &CAgoraScreenCapture::OnCbnDropDownComboHwnd) ON_CBN_SELCHANGE(IDC_COMBO_HWND, &CAgoraScreenCapture::OnCbnSelchangeComboHwnd) ON_BN_CLICKED(IDC_BUTTON_HWND_EXECLUDE, &CAgoraScreenCapture::OnBnClickedButtonHwndExeclude) diff --git a/windows/APIExample/APIExample/Advanced/SpatialAudio/CAgoraSpatialAudioDlg.cpp b/windows/APIExample/APIExample/Advanced/SpatialAudio/CAgoraSpatialAudioDlg.cpp index bb04f6fe7..032a14c11 100755 --- a/windows/APIExample/APIExample/Advanced/SpatialAudio/CAgoraSpatialAudioDlg.cpp +++ b/windows/APIExample/APIExample/Advanced/SpatialAudio/CAgoraSpatialAudioDlg.cpp @@ -317,7 +317,7 @@ BEGIN_MESSAGE_MAP(CAgoraSpatialAudioDlg, CDialogEx) ON_BN_CLICKED(IDC_CHECK_AUDIO_SOURCE_MUTE, &CAgoraSpatialAudioDlg::OnBnClickedCheckAudioSourceMute) ON_BN_CLICKED(IDC_CHECK_AUDIO_SOURCE_BLUR, &CAgoraSpatialAudioDlg::OnBnClickedCheckAudioSourceBlur) ON_BN_CLICKED(IDC_CHECK_AUDIO_SOURCE_AIRBORNE, 
&CAgoraSpatialAudioDlg::OnBnClickedCheckAudioSourceAirborne) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_AUDIO_SOURCE_ATTENUATION, &CAgoraSpatialAudioDlg::OnNMCustomdrawSliderAudioSourceAttenuation) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_AUDIO_SOURCE_ATTENUATION, &CAgoraSpatialAudioDlg::OnNMCustomdrawSliderAudioSourceAttenuation) ON_BN_CLICKED(IDC_CHECK_AUDIO_ZONE, &CAgoraSpatialAudioDlg::OnBnClickedCheckAudioZone) ON_BN_CLICKED(IDC_CHECK_MUTE_LOCAL_AUDIO, &CAgoraSpatialAudioDlg::OnBnClickedCheckMuteLocalAudio) END_MESSAGE_MAP() diff --git a/windows/APIExample/APIExample/Basic/LiveBroadcasting/CLiveBroadcastingDlg.cpp b/windows/APIExample/APIExample/Basic/LiveBroadcasting/CLiveBroadcastingDlg.cpp index 55acb79df..a719912d1 100755 --- a/windows/APIExample/APIExample/Basic/LiveBroadcasting/CLiveBroadcastingDlg.cpp +++ b/windows/APIExample/APIExample/Basic/LiveBroadcasting/CLiveBroadcastingDlg.cpp @@ -182,7 +182,7 @@ BEGIN_MESSAGE_MAP(CLiveBroadcastingDlg, CDialogEx) ON_BN_CLICKED(IDC_RADIO_CANVAS_HIDDEN, &CLiveBroadcastingDlg::OnBnClickedRadioCanvasRenderMode) ON_BN_CLICKED(IDC_RADIO_CANVAS_FIT, &CLiveBroadcastingDlg::OnBnClickedRadioCanvasRenderMode) ON_BN_CLICKED(IDC_RADIO_CANVAS_ADAPTIVE, &CLiveBroadcastingDlg::OnBnClickedRadioCanvasRenderMode) - ON_NOTIFY(NM_CUSTOMDRAW, IDC_SLIDER_CANVAS_COLOR, &CLiveBroadcastingDlg::OnNMCustomdrawSliderCanvasColor) + ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_CANVAS_COLOR, &CLiveBroadcastingDlg::OnNMCustomdrawSliderCanvasColor) ON_BN_CLICKED(IDC_CHECK_VIDEO_IMAGE, &CLiveBroadcastingDlg::OnBnClickedCheckVideoImage) END_MESSAGE_MAP() diff --git a/windows/APIExample/APIExample/Language.h b/windows/APIExample/APIExample/Language.h index 013420358..be0021ae4 100755 --- a/windows/APIExample/APIExample/Language.h +++ b/windows/APIExample/APIExample/Language.h @@ -24,7 +24,7 @@ extern wchar_t basicLiveBroadcasting[INFO_LEN]; extern wchar_t basicJoinChannelVideoByToken[INFO_LEN]; extern wchar_t advancedRtmpInject[INFO_LEN]; extern wchar_t advancedRtmpStreaming[INFO_LEN]; -extern wchar_t advancedVideoMetadata[INFO_LEN]; +extern wchar_t advancedMetadata[INFO_LEN]; extern wchar_t advancedCustomEncrypt[INFO_LEN]; extern wchar_t advancedScreenCap[INFO_LEN]; @@ -342,3 +342,8 @@ extern wchar_t mediaRecorderStartRecording[INFO_LEN]; extern wchar_t mediaRecorderStopRecording[INFO_LEN]; extern wchar_t localVideoTranscodingVirtualBg[INFO_LEN]; + +// metadata +extern wchar_t advancedMetadataVideo[INFO_LEN]; +extern wchar_t advancedMetadataAudio[INFO_LEN]; +extern wchar_t advancedMetadataSend[INFO_LEN]; diff --git a/windows/APIExample/APIExample/en.ini b/windows/APIExample/APIExample/en.ini index a1d18c559..02ccca071 100755 --- a/windows/APIExample/APIExample/en.ini +++ b/windows/APIExample/APIExample/en.ini @@ -19,7 +19,7 @@ Basic.LiveBroadcasting=LiveBroadcasting Basic.JoinChannelVideoByToken=JoinChannelVideoByToken Advanced.RtmpStreaming=Rtmp Streaming Advanced.RtmpInject=Rtmp Inject -Advanced.Metadata=Video SEI +Advanced.Metadata=Metadata Advanced.Beauty=Beauty Advanced.BeautyAudio=Beauty Audio @@ -299,4 +299,8 @@ SpatialAudio.Prepare.Info=Preparing 3D Effect... 
@@ -299,4 +299,8 @@ SpatialAudio.Prepare.Info=Preparing 3D Effect... MediaRecoder.StartRecording=StartRecording MediaRecoder.StopRecording=StopRecording -LocalVideoTranscoding.VirtualBg=TranslusionBackground \ No newline at end of file +LocalVideoTranscoding.VirtualBg=TranslucentBackground + +Metadata.Video=Video Metadata +Metadata.Audio=Audio Metadata +Metadata.Send=Send \ No newline at end of file diff --git a/windows/APIExample/APIExample/resource.h b/windows/APIExample/APIExample/resource.h index 9d77aacc2..3302b60af 100755 --- a/windows/APIExample/APIExample/resource.h +++ b/windows/APIExample/APIExample/resource.h @@ -84,6 +84,7 @@ #define IDC_COMBO_PERSONS2 1022 #define IDC_COMBO_ENCODER 1022 #define IDC_BUTTON_LOCALRECORDER 1022 +#define IDC_STATIC_VIDEO_METADATA 1022 #define IDC_EDIT_INJECT_URL 1023 #define IDC_EDIT_SEI 1023 #define IDC_EDIT_BEAUTY_REDNESS 1023 @@ -94,6 +95,7 @@ #define IDC_BUTTON_REMOTERECORDER 1023 #define IDC_STATIC_FPS_VALUE 1023 #define IDC_EDIT_CHANNELNAME2 1023 +#define IDC_EDIT_AUDIO_METADATA 1023 #define IDC_BUTTON_ADDSTREAM 1024 #define IDC_BUTTON_SEND 1024 #define IDC_EDIT_BEAUTY_SMOOTHNESS 1024 @@ -101,6 +103,7 @@ #define IDC_EDIT_AUDIO_AGIN 1024 #define IDC_BUTTON_OPEN 1024 #define IDC_STATIC_CHANNELNAME3 1024 +#define IDC_BUTTON_SEND_VIDEO_METADATA 1024 #define IDC_EDIT_AUDIO_PITCH 1025 #define IDC_BUTTON_REMOVE_STREAM 1025 #define IDC_EDIT_RECV 1025 @@ -108,11 +111,15 @@ #define IDC_BUTTON_STOP 1025 #define IDC_STATIC_BITRATE_VALUE 1025 #define IDC_EDIT_CHANNELNAME3 1025 +#define IDC_STATIC_AUDIO_METADATA 1025 #define IDC_BUTTON_REMOVE_ALLSTREAM 1026 #define IDC_BUTTON_PLAY 1026 #define IDC_STATIC_SCALE 1026 +#define IDC_EDIT_SEI2 1026 +#define IDC_EDIT_VIDEO_METADATA 1026 #define IDC_BUTTON_ATTACH 1027 #define IDC_STATIC_HWND 1027 +#define IDC_BUTTON_SEND_AUDIO_METADATA 1027 #define IDC_BUTTON_PUBLISH_VIDEO 1028 #define IDC_STATIC_SCALE_VALUE 1028 #define IDC_BUTTON_PUBLISH_AUDIO 1029 diff --git a/windows/APIExample/APIExample/stdafx.cpp b/windows/APIExample/APIExample/stdafx.cpp index fabf09579..57cfaf651 100755 --- a/windows/APIExample/APIExample/stdafx.cpp +++ b/windows/APIExample/APIExample/stdafx.cpp @@ -25,7 +25,7 @@ wchar_t basicLiveBroadcasting[INFO_LEN] = { 0 }; wchar_t basicJoinChannelVideoByToken[INFO_LEN] = { 0 }; wchar_t advancedRtmpInject[INFO_LEN] = { 0 }; wchar_t advancedRtmpStreaming[INFO_LEN] = { 0 }; -wchar_t advancedVideoMetadata[INFO_LEN] = { 0 }; +wchar_t advancedMetadata[INFO_LEN] = { 0 }; wchar_t advancedCustomEncrypt[INFO_LEN] = { 0 }; wchar_t advancedScreenCap[INFO_LEN] = { 0 }; @@ -312,6 +312,11 @@ wchar_t mediaRecorderStopRecording[INFO_LEN] = { 0 }; wchar_t localVideoTranscodingVirtualBg[INFO_LEN] = { 0 }; +// metadata +wchar_t advancedMetadataVideo[INFO_LEN] = { 0 }; +wchar_t advancedMetadataAudio[INFO_LEN] = { 0 }; +wchar_t advancedMetadataSend[INFO_LEN] = { 0 }; + std::string cs2utf8(CString str) { char szBuf[2 * MAX_PATH] = { 0 }; @@ -445,7 +450,7 @@ void InitKeyInfomation() //advanced scene list _tcscpy_s(advancedRtmpInject, INFO_LEN, Str(_T("Advanced.RtmpInject"))); _tcscpy_s(advancedRtmpStreaming, INFO_LEN, Str(_T("Advanced.RtmpStreaming"))); - _tcscpy_s(advancedVideoMetadata, INFO_LEN, Str(_T("Advanced.Metadata"))); + _tcscpy_s(advancedMetadata, INFO_LEN, Str(_T("Advanced.Metadata"))); _tcscpy_s(advancedCustomEncrypt, INFO_LEN, Str(_T("Advanced.CustomEncrypt"))); _tcscpy_s(advancedScreenCap, INFO_LEN, Str(_T("Advanced.ScreenCap")));
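On the resource.h hunk above: the new metadata controls reuse numeric IDs already assigned to other controls (1022-1027), which is legal in MFC as long as each ID is unique within a single dialog template. The Metadata.* strings declared in Language.h and loaded in stdafx.cpp are then stamped onto those controls in the dialog's InitCtrlText(), per the project convention; a sketch of the consuming side (the dialog class name is assumed; the IDs and string globals are the ones added in this patch):

void CAgoraMetaDataDlg::InitCtrlText()
{
	SetDlgItemText(IDC_STATIC_VIDEO_METADATA, advancedMetadataVideo);     // "Video Metadata"
	SetDlgItemText(IDC_STATIC_AUDIO_METADATA, advancedMetadataAudio);     // "Audio Metadata"
	SetDlgItemText(IDC_BUTTON_SEND_VIDEO_METADATA, advancedMetadataSend); // "Send"
	SetDlgItemText(IDC_BUTTON_SEND_AUDIO_METADATA, advancedMetadataSend);
}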
@@ -753,6 +758,13 @@ void InitKeyInfomation() _tcscpy_s(mediaRecorderStopRecording, INFO_LEN, Str(_T("MediaRecoder.StopRecording"))); _tcscpy_s(localVideoTranscodingVirtualBg, INFO_LEN, Str(_T("LocalVideoTranscoding.VirtualBg"))); + + + // metadata + _tcscpy_s(advancedMetadataVideo, INFO_LEN, Str(_T("Metadata.Video"))); + _tcscpy_s(advancedMetadataAudio, INFO_LEN, Str(_T("Metadata.Audio"))); + _tcscpy_s(advancedMetadataSend, INFO_LEN, Str(_T("Metadata.Send"))); + /* _tcscpy_s(, INFO_LEN, Str(_T(""))); _tcscpy_s(, INFO_LEN, Str(_T(""))); _tcscpy_s(, INFO_LEN, Str(_T(""))); diff --git a/windows/APIExample/APIExample/stdafx.h b/windows/APIExample/APIExample/stdafx.h index c72a8974f..b444442a1 100644 --- a/windows/APIExample/APIExample/stdafx.h +++ b/windows/APIExample/APIExample/stdafx.h @@ -120,6 +120,9 @@ using namespace agora::media; #define EID_ON_CAPTURE_VIDEOFRAME 0x00000037 #define EID_ERROR 0x00000038 +#define EID_ON_FACE_INFO 0x00000039 +#define EID_ON_RECV_AUDIO_METADATA 0x0000003A + typedef struct _tagRtmpStreamStateChanged { char* url; diff --git a/windows/APIExample/APIExample/zh-cn.ini b/windows/APIExample/APIExample/zh-cn.ini index f47933e06..21c02b671 100755 --- a/windows/APIExample/APIExample/zh-cn.ini +++ b/windows/APIExample/APIExample/zh-cn.ini @@ -15,7 +15,7 @@ Basic.LiveBroadcasting=直播 Basic.JoinChannelVideoByToken=视频直播(Token) Advanced.RtmpStreaming=旁路推流 Advanced.RtmpInject=媒体注入 -Advanced.Metadata=视频SEI +Advanced.Metadata=视频元数据 Advanced.CustomEncryp=自定义加密 Advanced.Beauty=美颜 Advanced.BeautyAudio=美声 @@ -288,4 +288,9 @@ SpatialAudio.Prepare.Info= MediaRecoder.StartRecording=开始录制 MediaRecoder.StopRecording=停止录制 -LocalVideoTranscoding.VirtualBg=透明背景 \ No newline at end of file +LocalVideoTranscoding.VirtualBg=透明背景 + + +Metadata.Video=视频元数据 +Metadata.Audio=音频元数据 +Metadata.Send=发送 \ No newline at end of file diff --git a/windows/APIExample/install.ps1 b/windows/APIExample/install.ps1 index 98593bba6..097b988ce 100644 --- a/windows/APIExample/install.ps1 +++ b/windows/APIExample/install.ps1 @@ -1,6 +1,6 @@ -$ThirdPartysrc = 'https://download.agora.io/staging/ThirdParty.zip' +$ThirdPartysrc = 'https://fullapp.oss-cn-beijing.aliyuncs.com/API-Examples/ThirdParty.zip' $ThirdPartydes = 'ThirdParty.zip' -$agora_sdk = 'https://download.agora.io/sdk/release/Agora_Native_SDK_for_Windows_v4.3.0_FULL.zip' +$agora_sdk = 'https://download.agora.io/sdk/release/Agora_Native_SDK_for_Windows_v4.3.1_FULL.zip' $agora_des = 'AgoraSdk.zip' $agora_local_sdk = '../../sdk'
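Finally, the two EID_* codes added to stdafx.h extend the project's existing pattern for marshalling SDK callbacks, which arrive on engine threads, onto the UI thread via PostMessage and the WM_MSGID macro. A hedged sketch for the audio-metadata event (the onAudioMetadataReceived signature follows the 4.3.1 audio-metadata feature this patch targets and should be treated as an assumption; class and member names are illustrative):

void CSampleEventHandler::onAudioMetadataReceived(agora::rtc::uid_t uid, const char* metadata, size_t length)
{
	if (m_hMsgWnd == NULL) return;
	// Copy the payload: the SDK only guarantees the buffer during the callback.
	char* payload = new char[length + 1];
	memcpy(payload, metadata, length);
	payload[length] = '\0';
	::PostMessage(m_hMsgWnd, WM_MSGID(EID_ON_RECV_AUDIO_METADATA), (WPARAM)payload, (LPARAM)length);
	// The receiving dialog handles WM_MSGID(EID_ON_RECV_AUDIO_METADATA) and must delete[] the payload.
}

EID_ON_FACE_INFO would follow the same route, and the install.ps1 change above pins the Windows SDK download to 4.3.1, the version this patch builds against.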